diff --git a/testbed/gradio-app__gradio/.changeset/README.md b/testbed/gradio-app__gradio/.changeset/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e5b6d8d6a67ad0dca8f20117fbfc72e076882d00 --- /dev/null +++ b/testbed/gradio-app__gradio/.changeset/README.md @@ -0,0 +1,8 @@ +# Changesets + +Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works +with multi-package repos, or single-package repos to help you version and publish your code. You can +find the full documentation for it [in our repository](https://github.com/changesets/changesets) + +We have a quick list of common questions to get you started engaging with this project in +[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md) diff --git a/testbed/gradio-app__gradio/.changeset/changeset.cjs b/testbed/gradio-app__gradio/.changeset/changeset.cjs new file mode 100644 index 0000000000000000000000000000000000000000..a94b61d6342d432eb0668eaefa20bd8cbee845d0 --- /dev/null +++ b/testbed/gradio-app__gradio/.changeset/changeset.cjs @@ -0,0 +1,280 @@ +const { getPackagesSync } = require("@manypkg/get-packages"); +const gh = require("@changesets/get-github-info"); +const { existsSync, readFileSync, writeFileSync } = require("fs"); +const { join } = require("path"); + +const { getInfo, getInfoFromPullRequest } = gh; +const { packages, rootDir } = getPackagesSync(process.cwd()); + +/** + * @typedef {{packageJson: {name: string, python?: boolean}, dir: string}} Package + */ + +/** + * @typedef {{summary: string, id: string, commit: string, releases: {name: string}}} Changeset + */ + +/** + * + * @param {string} package_name The name of the package to find the directories for + * @returns {string[]} The directories for the package + */ +function find_packages_dirs(package_name) { + /** @type {string[]} */ + let package_dirs = []; + + /** @type {Package | undefined} */ + const _package = 
packages.find((p) => p.packageJson.name === package_name); + if (!_package) throw new Error(`Package ${package_name} not found`); + + package_dirs.push(_package.dir); + if (_package.packageJson.python) { + package_dirs.push(join(_package.dir, "..")); + } + return package_dirs; +} + +const changelogFunctions = { + /** + * + * @param {Changeset[]} changesets The changesets that have been created + * @param {any} dependenciesUpdated The dependencies that have been updated + * @param {any} options The options passed to the changelog generator + * @returns {Promise} The release line for the dependencies + */ + getDependencyReleaseLine: async ( + changesets, + dependenciesUpdated, + options + ) => { + if (!options.repo) { + throw new Error( + 'Please provide a repo to this changelog generator like this:\n"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]' + ); + } + if (dependenciesUpdated.length === 0) return ""; + + const changesetLink = `- Updated dependencies [${( + await Promise.all( + changesets.map(async (cs) => { + if (cs.commit) { + let { links } = await getInfo({ + repo: options.repo, + commit: cs.commit + }); + return links.commit; + } + }) + ) + ) + .filter((_) => _) + .join(", ")}]:`; + + const updatedDepenenciesList = dependenciesUpdated.map( + /** + * + * @param {any} dependency The dependency that has been updated + * @returns {string} The formatted dependency + */ + (dependency) => ` - ${dependency.name}@${dependency.newVersion}` + ); + + return [changesetLink, ...updatedDepenenciesList].join("\n"); + }, + /** + * + * @param {{summary: string, id: string, commit: string, releases: {name: string}[]}} changeset The changeset that has been created + * @param {any} type The type of changeset + * @param {any} options The options passed to the changelog generator + * @returns {Promise} The release line for the changeset + */ + getReleaseLine: async (changeset, type, options) => { + if (!options || !options.repo) { + throw new Error( + 
'Please provide a repo to this changelog generator like this:\n"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]' + ); + } + + let prFromSummary; + let commitFromSummary; + /** + * @type {string[]} + */ + let usersFromSummary = []; + + const replacedChangelog = changeset.summary + .replace(/^\s*(?:pr|pull|pull\s+request):\s*#?(\d+)/im, (_, pr) => { + let num = Number(pr); + if (!isNaN(num)) prFromSummary = num; + return ""; + }) + .replace(/^\s*commit:\s*([^\s]+)/im, (_, commit) => { + commitFromSummary = commit; + return ""; + }) + .replace(/^\s*(?:author|user):\s*@?([^\s]+)/gim, (_, user) => { + usersFromSummary.push(user); + return ""; + }) + .trim(); + + const [firstLine, ...futureLines] = replacedChangelog + .split("\n") + .map((l) => l.trimRight()); + + const links = await (async () => { + if (prFromSummary !== undefined) { + let { links } = await getInfoFromPullRequest({ + repo: options.repo, + pull: prFromSummary + }); + if (commitFromSummary) { + links = { + ...links, + commit: `[\`${commitFromSummary}\`](https://github.com/${options.repo}/commit/${commitFromSummary})` + }; + } + return links; + } + const commitToFetchFrom = commitFromSummary || changeset.commit; + if (commitToFetchFrom) { + let { links } = await getInfo({ + repo: options.repo, + commit: commitToFetchFrom + }); + return links; + } + return { + commit: null, + pull: null, + user: null + }; + })(); + + const users = + usersFromSummary && usersFromSummary.length + ? usersFromSummary + .map( + (userFromSummary) => + `[@${userFromSummary}](https://github.com/${userFromSummary})` + ) + .join(", ") + : links.user; + + const prefix = [ + links.pull === null ? "" : `${links.pull}`, + links.commit === null ? "" : `${links.commit}` + ] + .join(" ") + .trim(); + + const suffix = users === null ? 
"" : ` Thanks ${users}!`; + + /** + * @typedef {{[key: string]: string[] | {dirs: string[], current_changelog: string, feat: {summary: string}[], fix: {summary: string}[], highlight: {summary: string}[]}}} ChangesetMeta + */ + + /** + * @type { ChangesetMeta & { _handled: string[] } }} + */ + let lines; + if (existsSync(join(rootDir, ".changeset", "_changelog.json"))) { + lines = JSON.parse( + readFileSync(join(rootDir, ".changeset", "_changelog.json"), "utf-8") + ); + } else { + lines = { + _handled: [] + }; + } + + if (lines._handled.includes(changeset.id)) { + return "done"; + } + lines._handled.push(changeset.id); + + changeset.releases.forEach((release) => { + if (!lines[release.name]) + lines[release.name] = { + dirs: find_packages_dirs(release.name), + current_changelog: "", + feat: [], + fix: [], + highlight: [] + }; + + const changelog_path = join( + //@ts-ignore + lines[release.name].dirs[1] || lines[release.name].dirs[0], + "CHANGELOG.md" + ); + + if (existsSync(changelog_path)) { + //@ts-ignore + lines[release.name].current_changelog = readFileSync( + changelog_path, + "utf-8" + ) + .replace(`# ${release.name}`, "") + .trim(); + } + + const [, _type, summary] = changeset.summary + .trim() + .match(/^(feat|fix|highlight)\s*:\s*([^]*)/im) || [ + , + false, + changeset.summary + ]; + + let formatted_summary = ""; + + if (_type === "highlight") { + const [heading, ...rest] = summary.trim().split("\n"); + const _heading = `${heading} ${prefix ? `(${prefix})` : ""}`; + const _rest = rest.concat(["", suffix]); + + formatted_summary = `${_heading}\n${_rest.join("\n")}`; + } else { + formatted_summary = handle_line(summary, prefix, suffix); + } + + //@ts-ignore + lines[release.name][_type].push({ + summary: formatted_summary + }); + }); + + writeFileSync( + join(rootDir, ".changeset", "_changelog.json"), + JSON.stringify(lines, null, 2) + ); + + return `\n\n-${prefix ? 
`${prefix} -` : ""} ${firstLine}\n${futureLines + .map((l) => ` ${l}`) + .join("\n")}`; + } +}; + +/** + * @param {string} str The changelog entry + * @param {string} prefix The prefix to add to the first line + * @param {string} suffix The suffix to add to the last line + * @returns {string} The formatted changelog entry + */ +function handle_line(str, prefix, suffix) { + const [_s, ...lines] = str.split("\n").filter(Boolean); + + const desc = `${prefix ? `${prefix} -` : ""} ${_s.replace( + /[\s\.]$/, + "" + )}. ${suffix}`; + + if (_s.length === 1) { + return desc; + } + + return [desc, ...lines.map((l) => ` ${l}`)].join("/n"); +} + +module.exports = changelogFunctions; diff --git a/testbed/gradio-app__gradio/.changeset/config.json b/testbed/gradio-app__gradio/.changeset/config.json new file mode 100644 index 0000000000000000000000000000000000000000..d2ba85f6bb23a7c7a64595f488aa35eed7441f7f --- /dev/null +++ b/testbed/gradio-app__gradio/.changeset/config.json @@ -0,0 +1,11 @@ +{ + "$schema": "https://unpkg.com/@changesets/config@2.3.0/schema.json", + "changelog": ["./changeset.cjs", { "repo": "gradio-app/gradio" }], + "commit": false, + "fixed": [], + "linked": [], + "access": "public", + "baseBranch": "main", + "updateInternalDependencies": "patch", + "ignore": ["@gradio/spaces-test", "@gradio/cdn-test"] +} diff --git a/testbed/gradio-app__gradio/.changeset/fix_changelogs.cjs b/testbed/gradio-app__gradio/.changeset/fix_changelogs.cjs new file mode 100644 index 0000000000000000000000000000000000000000..202ce30462a0e2fc9e868f56b1e5df73a465a803 --- /dev/null +++ b/testbed/gradio-app__gradio/.changeset/fix_changelogs.cjs @@ -0,0 +1,122 @@ +const { join } = require("path"); +const { readFileSync, existsSync, writeFileSync, unlinkSync } = require("fs"); +const { getPackagesSync } = require("@manypkg/get-packages"); + +const RE_PKG_NAME = /^[\w-]+\b/; +const pkg_meta = getPackagesSync(process.cwd()); + +/** + * @typedef {{dirs: string[], highlight: {summary: string}[], 
feat: {summary: string}[], fix: {summary: string}[], current_changelog: string}} ChangesetMeta + */ + +/** + * @typedef {{[key: string]: ChangesetMeta}} ChangesetMetaCollection + */ + +function run() { + if (!existsSync(join(pkg_meta.rootDir, ".changeset", "_changelog.json"))) { + console.warn("No changesets to process"); + return; + } + + /** + * @type { ChangesetMetaCollection & { _handled: string[] } }} + */ + const { _handled, ...packages } = JSON.parse( + readFileSync( + join(pkg_meta.rootDir, ".changeset", "_changelog.json"), + "utf-8" + ) + ); + + /** + * @typedef { {packageJson: {name: string, version: string, python: boolean}, dir: string} } PackageMeta + */ + + /** + * @type { {[key:string]: PackageMeta} } + */ + const all_packages = pkg_meta.packages.reduce((acc, pkg) => { + acc[pkg.packageJson.name] = /**@type {PackageMeta} */ ( + /** @type {unknown} */ (pkg) + ); + return acc; + }, /** @type {{[key:string] : PackageMeta}} */ ({})); + + for (const pkg_name in packages) { + const { dirs, highlight, feat, fix, current_changelog } = + /**@type {ChangesetMeta} */ (packages[pkg_name]); + + const { version, python } = all_packages[pkg_name].packageJson; + + const highlights = highlight.map((h) => `${h.summary}`); + const features = feat.map((f) => `- ${f.summary}`); + const fixes = fix.map((f) => `- ${f.summary}`); + + const release_notes = /** @type {[string[], string][]} */ ([ + [highlights, "### Highlights"], + [features, "### Features"], + [fixes, "### Fixes"] + ]) + .filter(([s], i) => s.length > 0) + .map(([lines, title]) => { + if (title === "### Highlights") { + return `${title}\n\n${lines.join("\n\n")}`; + } + + return `${title}\n\n${lines.join("\n")}`; + }) + .join("\n\n"); + + const new_changelog = `# ${pkg_name} + +## ${version} + +${release_notes} + +${current_changelog.replace(`# ${pkg_name}`, "").trim()} +`.trim(); + + dirs.forEach((dir) => { + writeFileSync(join(dir, "CHANGELOG.md"), new_changelog); + }); + + if (python) { + 
bump_local_dependents(pkg_name, version); + } + } + + unlinkSync(join(pkg_meta.rootDir, ".changeset", "_changelog.json")); + + /** + * @param {string} pkg_to_bump The name of the package to bump + * @param {string} version The version to bump to + * @returns {void} + * */ + function bump_local_dependents(pkg_to_bump, version) { + for (const pkg_name in all_packages) { + const { + dir, + packageJson: { python } + } = all_packages[pkg_name]; + + if (!python) continue; + + const requirements_path = join(dir, "..", "requirements.txt"); + const requirements = readFileSync(requirements_path, "utf-8").split("\n"); + + const pkg_index = requirements.findIndex((line) => { + const m = line.trim().match(RE_PKG_NAME); + if (!m) return false; + return m[0] === pkg_to_bump; + }); + + if (pkg_index !== -1) { + requirements[pkg_index] = `${pkg_to_bump}==${version}`; + writeFileSync(requirements_path, requirements.join("\n")); + } + } + } +} + +run(); diff --git a/testbed/gradio-app__gradio/.config/.prettierignore b/testbed/gradio-app__gradio/.config/.prettierignore new file mode 100644 index 0000000000000000000000000000000000000000..9914c591279fb12b99c7e4a1f1f170393017723e --- /dev/null +++ b/testbed/gradio-app__gradio/.config/.prettierignore @@ -0,0 +1,27 @@ +**/js/app/public/** +**/pnpm-workspace.yaml +**/js/app/dist/** +**/js/wasm/dist/** +**/client/js/dist/** +**/js/lite/dist/** +**/pnpm-lock.yaml +**/js/plot/src/Plot.svelte +**/.svelte-kit/** +**/demo/** +**/gradio/** +**/.pnpm-store/** +**/.venv/** +**/.github/** +/guides/** +**/.mypy_cache/** +!test-strategy.md +**/js/_space-test/** +../js/app/src/lite/theme.css +../js/storybook/theme.css +**/gradio_cached_examples/** +**/storybook-static/** +**/.vscode/** +sweep.yaml +**/.vercel/** +**/build/** +**/*.md \ No newline at end of file diff --git a/testbed/gradio-app__gradio/.config/.prettierrc.json b/testbed/gradio-app__gradio/.config/.prettierrc.json new file mode 100644 index 
0000000000000000000000000000000000000000..0e8b0ad886052fcbf88e07df76b2f2ab0969e418 --- /dev/null +++ b/testbed/gradio-app__gradio/.config/.prettierrc.json @@ -0,0 +1,7 @@ +{ + "useTabs": true, + "singleQuote": false, + "trailingComma": "none", + "printWidth": 80, + "plugins": ["prettier-plugin-svelte"] +} diff --git a/testbed/gradio-app__gradio/.config/basevite.config.ts b/testbed/gradio-app__gradio/.config/basevite.config.ts new file mode 100644 index 0000000000000000000000000000000000000000..5e12d3264312d544485877eecb53192f45043b8d --- /dev/null +++ b/testbed/gradio-app__gradio/.config/basevite.config.ts @@ -0,0 +1,91 @@ +import { defineConfig } from "vite"; +import { svelte } from "@sveltejs/vite-plugin-svelte"; +import sveltePreprocess from "svelte-preprocess"; +// @ts-ignore +import custom_media from "postcss-custom-media"; +import global_data from "@csstools/postcss-global-data"; +// @ts-ignore +import prefixer from "postcss-prefix-selector"; +import { readFileSync } from "fs"; +import { join } from "path"; +import { fileURLToPath } from "url"; + +const __dirname = fileURLToPath(new URL(".", import.meta.url)); +const version_path = join(__dirname, "..", "gradio", "package.json"); +const theme_token_path = join( + __dirname, + "..", + "js", + "theme", + "src", + "tokens.css" +); + +const version = JSON.parse(readFileSync(version_path, { encoding: 'utf-8' })).version.trim().replace(/\./g, '-'); + +//@ts-ignore +export default defineConfig(({ mode }) => { + const production = + mode === "production:cdn" || + mode === "production:local" || + mode === "production:website"; + + return { + server: { + port: 9876 + }, + + build: { + sourcemap: false, + target: "esnext", + minify: production + }, + define: { + BUILD_MODE: production ? JSON.stringify("prod") : JSON.stringify("dev"), + BACKEND_URL: production + ? 
JSON.stringify("") + : JSON.stringify("http://localhost:7860/"), + GRADIO_VERSION: JSON.stringify(version) + }, + css: { + postcss: { + plugins: [ + prefixer({ + prefix: `.gradio-container-${version}`, + // @ts-ignore + transform(prefix, selector, prefixedSelector, fileName) { + if (selector.indexOf("gradio-container") > -1) { + return prefix; + } else if ( + selector.indexOf(":root") > -1 || + selector.indexOf("dark") > -1 || + fileName.indexOf(".svelte") > -1 + ) { + return selector; + } + return prefixedSelector; + } + }), + custom_media() + ] + } + }, + plugins: [ + svelte({ + inspector: true, + compilerOptions: { + dev: !production + }, + hot: !process.env.VITEST && !production, + preprocess: sveltePreprocess({ + postcss: { + plugins: [ + global_data({ files: [theme_token_path] }), + custom_media() + ] + } + }) + }) + ] + }; +}); diff --git a/testbed/gradio-app__gradio/.config/eslint.config.js b/testbed/gradio-app__gradio/.config/eslint.config.js new file mode 100644 index 0000000000000000000000000000000000000000..41f34981991b337c97e2b96719f51182227e4994 --- /dev/null +++ b/testbed/gradio-app__gradio/.config/eslint.config.js @@ -0,0 +1,142 @@ +import globals from "globals"; +import ts_plugin from "@typescript-eslint/eslint-plugin"; +import js_plugin from "@eslint/js"; + +import typescriptParser from "@typescript-eslint/parser"; +import sveltePlugin from "eslint-plugin-svelte"; +import svelteParser from "svelte-eslint-parser"; + +const ts_rules_disabled = Object.fromEntries( + Object.keys(ts_plugin.rules).map((rule) => [ + `@typescript-eslint/${rule}`, + "off" + ]) +); +const js_rules_disabled = Object.fromEntries( + Object.keys(js_plugin.configs.all.rules).map((rule) => [rule, "off"]) +); + +const js_rules = { + ...js_rules_disabled, + "no-console": ["error", { allow: ["warn", "error", "debug"] }], + "no-constant-condition": "error", + "no-dupe-args": "error", + "no-extra-boolean-cast": "error", + "no-unexpected-multiline": "error", + "no-unreachable": 
"error", + "valid-jsdoc": "error", + "array-callback-return": "error", + complexity: "error", + "no-else-return": "error", + "no-useless-return": "error", + "no-undef": "error" +}; + +const ts_rules = { + ...ts_rules_disabled, + "@typescript-eslint/adjacent-overload-signatures": "error", + "@typescript-eslint/explicit-function-return-type": [ + "error", + { allowExpressions: true } + ], + "@typescript-eslint/consistent-type-exports": "error", + "@typescript-eslint/ban-types": "error", + "@typescript-eslint/array-type": "error", + "@typescript-eslint/no-inferrable-types": "error" +}; + +const { browser, es2021, node } = globals; + +export default [ + { + ignores: [ + ".svelte-kit/**/*", + "**/node_modules/**", + "**/dist/**", + "**/.config/*", + "**/*.spec.ts", + "**/*.test.ts", + "**/*.node-test.ts", + "js/app/test/**/*", + "**/*vite.config.ts", + "**/_website/**/*", + "**/_spaces-test/**/*" + ] + }, + { + files: ["**/*.js", "**/*.cjs"], + languageOptions: { + globals: { + ...browser, + ...es2021, + ...node + } + }, + + plugins: { + "eslint:recommended": js_plugin + }, + rules: js_rules + }, + + { + files: ["**/*.ts"], + languageOptions: { + parser: typescriptParser, + parserOptions: { + project: "./tsconfig.json", + extraFileExtensions: [".svelte"] + }, + globals: { + ...browser, + ...es2021, + ...node + } + }, + + plugins: { + "@typescript-eslint": ts_plugin, + "eslint:recommended": js_plugin + }, + rules: { + ...ts_rules, + ...js_rules, + "no-undef": "off" + } + }, + { + files: ["**/client/js/**"], + languageOptions: { + parserOptions: { + project: "./client/js/tsconfig.json" + } + } + }, + { + files: ["**/*.svelte"], + languageOptions: { + parser: svelteParser, + parserOptions: { + parser: typescriptParser, + project: "./tsconfig.json", + extraFileExtensions: [".svelte"] + }, + globals: { + ...browser, + ...es2021 + } + }, + plugins: { + svelte: sveltePlugin, + "@typescript-eslint": ts_plugin, + "eslint:recommended": js_plugin + }, + rules: { + ...ts_rules, + 
...js_rules, + ...sveltePlugin.configs.recommended.rules, + "svelte/no-at-html-tags": "off", + "no-undef": "off" + } + } +]; diff --git a/testbed/gradio-app__gradio/.config/playwright-ct.config.ts b/testbed/gradio-app__gradio/.config/playwright-ct.config.ts new file mode 100644 index 0000000000000000000000000000000000000000..3f89855797a04fe2a9cab4e781728fd02279e046 --- /dev/null +++ b/testbed/gradio-app__gradio/.config/playwright-ct.config.ts @@ -0,0 +1,41 @@ +import { defineConfig, devices } from "@playwright/experimental-ct-svelte"; +import config from "./basevite.config"; + +/** + * See https://playwright.dev/docs/test-configuration. + */ +export default defineConfig({ + testDir: "../", + /* The base directory, relative to the config file, for snapshot files created with toMatchSnapshot and toHaveScreenshot. */ + snapshotDir: "./__snapshots__", + /* Maximum time one test can run for. */ + timeout: 10 * 1000, + /* Run tests in files in parallel */ + fullyParallel: true, + /* Fail the build on CI if you accidentally left test.only in the source code. */ + forbidOnly: !!process.env.CI, + /* Retry on CI only */ + retries: process.env.CI ? 2 : 0, + /* Opt out of parallel tests on CI. */ + workers: process.env.CI ? 1 : undefined, + /* Reporter to use. See https://playwright.dev/docs/test-reporters */ + reporter: "html", + /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ + use: { + /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */ + trace: "on-first-retry", + + /* Port to use for Playwright component endpoint. 
*/ + ctPort: 3100, + ctViteConfig: config({ mode: "development" }) + }, + testMatch: "*.component.spec.ts", + + /* Configure projects for major browsers */ + projects: [ + { + name: "chromium", + use: { ...devices["Desktop Chrome"] } + } + ] +}); diff --git a/testbed/gradio-app__gradio/.config/playwright-setup.js b/testbed/gradio-app__gradio/.config/playwright-setup.js new file mode 100644 index 0000000000000000000000000000000000000000..a4503d7c16989c899a88742bfd7ff53af1657a47 --- /dev/null +++ b/testbed/gradio-app__gradio/.config/playwright-setup.js @@ -0,0 +1,151 @@ +import { spawn } from "node:child_process"; +import { join, basename } from "path"; +import { fileURLToPath } from "url"; +import { readdirSync, writeFileSync } from "fs"; +import net from "net"; + +import kl from "kleur"; + +const __dirname = fileURLToPath(new URL(".", import.meta.url)); +const TEST_APP_PATH = join(__dirname, "./test.py"); +const TEST_FILES_PATH = join(__dirname, "..", "js", "app", "test"); +const ROOT = join(__dirname, ".."); + +const test_files = readdirSync(TEST_FILES_PATH) + .filter( + (f) => + f.endsWith("spec.ts") && + !f.endsWith(".skip.spec.ts") && + !f.endsWith(".component.spec.ts") + ) + .map((f) => basename(f, ".spec.ts")); + +export default async function global_setup() { + const verbose = process.env.GRADIO_TEST_VERBOSE; + + const port = await find_free_port(7860, 8860); + process.env.GRADIO_E2E_TEST_PORT = port; + + process.stdout.write(kl.yellow("\nCreating test gradio app.\n\n")); + + const test_app = make_app(test_files, port); + process.stdout.write(kl.yellow("App created. 
Starting test server.\n\n")); + + process.stdout.write(kl.bgBlue(" =========================== \n")); + process.stdout.write(kl.bgBlue(" === PYTHON STARTUP LOGS === \n")); + process.stdout.write(kl.bgBlue(" =========================== \n\n")); + + writeFileSync(TEST_APP_PATH, test_app); + + const app = await spawn_gradio_app(TEST_APP_PATH, port, verbose); + + process.stdout.write( + kl.green(`\n\nServer started. Running tests on port ${port}.\n`) + ); + + return () => { + process.stdout.write(kl.green(`\nTests complete, cleaning up!\n`)); + + kill_process(app); + }; +} +const INFO_RE = /^INFO:/; + +function spawn_gradio_app(app, port, verbose) { + const PORT_RE = new RegExp(`:${port}`); + + return new Promise((res, rej) => { + const _process = spawn(`python`, [app], { + shell: true, + stdio: "pipe", + cwd: ROOT, + env: { + ...process.env, + GRADIO_SERVER_PORT: `7879`, + PYTHONUNBUFFERED: "true" + } + }); + _process.stdout.setEncoding("utf8"); + + function std_out(data) { + const _data = data.toString(); + const is_info = INFO_RE.test(_data); + + if (is_info) { + process.stdout.write(kl.yellow(_data)); + } + + if (!is_info) { + process.stdout.write(`${_data}\n`); + } + + if (PORT_RE.test(_data)) { + process.stdout.write(kl.bgBlue("\n =========== END =========== ")); + + res(_process); + + if (!verbose) { + _process.stdout.off("data", std_out); + _process.stderr.off("data", std_out); + } + } + } + + _process.stdout.on("data", std_out); + _process.stderr.on("data", std_out); + _process.on("exit", () => kill_process(_process)); + _process.on("close", () => kill_process(_process)); + _process.on("disconnect", () => kill_process(_process)); + }); +} + +function kill_process(process) { + process.kill("SIGKILL"); +} + +function make_app(demos, port) { + return `import gradio as gr +import uvicorn +from fastapi import FastAPI +import gradio as gr +${demos.map((d) => `from demo.${d}.run import demo as ${d}`).join("\n")} + +app = FastAPI() +${demos + .map((d) => `app = 
gr.mount_gradio_app(app, ${d}, path="/${d}")`) + .join("\n")} + +config = uvicorn.Config(app, port=${port}, log_level="info") +server = uvicorn.Server(config=config) +server.run()`; +} + +export async function find_free_port(start_port, end_port) { + for (let port = start_port; port < end_port; port++) { + if (await is_free_port(port)) { + return port; + } + } + + throw new Error( + `Could not find free ports: there were not enough ports available.` + ); +} + +export function is_free_port(port) { + return new Promise((accept, reject) => { + const sock = net.createConnection(port, "127.0.0.1"); + sock.once("connect", () => { + sock.end(); + accept(false); + }); + sock.once("error", (e) => { + sock.destroy(); + if (e.code === "ECONNREFUSED") { + accept(true); + } else { + reject(e); + } + }); + }); +} diff --git a/testbed/gradio-app__gradio/.config/playwright.config.js b/testbed/gradio-app__gradio/.config/playwright.config.js new file mode 100644 index 0000000000000000000000000000000000000000..cf8fb79278b1be9fd9ec8de3ed041fd561381f4f --- /dev/null +++ b/testbed/gradio-app__gradio/.config/playwright.config.js @@ -0,0 +1,9 @@ +export default { + use: { + screenshot: "only-on-failure", + trace: "retain-on-failure" + }, + testMatch: /.*.spec.ts/, + testDir: "..", + globalSetup: "./playwright-setup.js" +}; diff --git a/testbed/gradio-app__gradio/.config/playwright/index.html b/testbed/gradio-app__gradio/.config/playwright/index.html new file mode 100644 index 0000000000000000000000000000000000000000..229f296a9e5a3df9f93a2fe378fc31e3b4fff879 --- /dev/null +++ b/testbed/gradio-app__gradio/.config/playwright/index.html @@ -0,0 +1,12 @@ + + + + + + Testing Page + + +
+ + + diff --git a/testbed/gradio-app__gradio/.config/playwright/index.ts b/testbed/gradio-app__gradio/.config/playwright/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..ac6de14bf2ed6d010d48977f7e17bb756307d5ce --- /dev/null +++ b/testbed/gradio-app__gradio/.config/playwright/index.ts @@ -0,0 +1,2 @@ +// Import styles, initialize component theme here. +// import '../src/common.css'; diff --git a/testbed/gradio-app__gradio/.config/postcss.config.cjs b/testbed/gradio-app__gradio/.config/postcss.config.cjs new file mode 100644 index 0000000000000000000000000000000000000000..81b1976568fb4a99716139d8c63d14a8f5d99390 --- /dev/null +++ b/testbed/gradio-app__gradio/.config/postcss.config.cjs @@ -0,0 +1,8 @@ +const tailwindcss = require("tailwindcss"); +const autoprefixer = require("autoprefixer"); +const nested = require("tailwindcss/nesting"); +const tw_config = require("./tailwind.config.cjs"); + +module.exports = { + plugins: [nested, tailwindcss(tw_config), autoprefixer] +}; diff --git a/testbed/gradio-app__gradio/.config/setup_vite_tests.ts b/testbed/gradio-app__gradio/.config/setup_vite_tests.ts new file mode 100644 index 0000000000000000000000000000000000000000..0917122266f78918d2d43103a2d060191f104c2a --- /dev/null +++ b/testbed/gradio-app__gradio/.config/setup_vite_tests.ts @@ -0,0 +1,11 @@ +import type { TestingLibraryMatchers } from "@testing-library/jest-dom/matchers"; +import matchers from "@testing-library/jest-dom/matchers"; +import { expect } from "vitest"; + +declare module "vitest" { + interface Assertion + extends jest.Matchers, + TestingLibraryMatchers {} +} + +expect.extend(matchers); diff --git a/testbed/gradio-app__gradio/.config/tailwind.config.cjs b/testbed/gradio-app__gradio/.config/tailwind.config.cjs new file mode 100644 index 0000000000000000000000000000000000000000..fe92c25a4ef936efc0d55db7bc77b6ecb8c3a1d0 --- /dev/null +++ b/testbed/gradio-app__gradio/.config/tailwind.config.cjs @@ -0,0 +1,12 @@ +module.exports = 
{ + content: [ + "./src/**/*.{html,js,svelte,ts}", + "**/@gradio/**/*.{html,js,svelte,ts}" + ], + + theme: { + extend: {} + }, + + plugins: [require("@tailwindcss/forms")] +}; diff --git a/testbed/gradio-app__gradio/.config/vitest.config.ts b/testbed/gradio-app__gradio/.config/vitest.config.ts new file mode 100644 index 0000000000000000000000000000000000000000..9de93ad0ec2eb93ff2a5b755d9c99ad9b837b4a9 --- /dev/null +++ b/testbed/gradio-app__gradio/.config/vitest.config.ts @@ -0,0 +1,3 @@ +import config from "../js/app/vite.config"; + +export default config; diff --git a/testbed/gradio-app__gradio/.devcontainer/devcontainer.json b/testbed/gradio-app__gradio/.devcontainer/devcontainer.json new file mode 100644 index 0000000000000000000000000000000000000000..6dafa31d21d36771530cc98a1a759ad2800a46c5 --- /dev/null +++ b/testbed/gradio-app__gradio/.devcontainer/devcontainer.json @@ -0,0 +1,41 @@ +// See https://containers.dev +{ + "name": "Python 3", + "image": "mcr.microsoft.com/devcontainers/python:0-3.9", + + // See https://containers.dev/features + "features": { + "ghcr.io/devcontainers/features/git:1": {}, + "ghcr.io/devcontainers/features/node:1": {}, + "ghcr.io/devcontainers-contrib/features/ffmpeg-apt-get:1": {} + }, + + "hostRequirements": { + "cpus": 4, + "memory": "8gb", + "storage": "32gb" + }, + + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python", + "ms-python.vscode-pylance", + "ms-python.black-formatter", + "ms-toolsai.jupyter", + "esbenp.prettier-vscode", + "svelte.svelte-vscode", + "phoenisx.cssvar" + ], + "remote.autoForwardPorts": false + } + }, + + "forwardPorts": [7860, 9876], + "portsAttributes": { + "7860": { "label": "gradio port" }, + "9876": { "label": "gradio dev port" } + }, + + "postCreateCommand": "export NODE_OPTIONS=\"--max-old-space-size=8192\" && chmod +x scripts/install_gradio.sh scripts/install_test_requirements.sh scripts/build_frontend.sh && ./scripts/install_gradio.sh && ./scripts/install_test_requirements.sh 
&& ./scripts/build_frontend.sh" +} diff --git a/testbed/gradio-app__gradio/README.md b/testbed/gradio-app__gradio/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a49e68a2e82d2751aa728fdb42cc5fd05d6e4e1f --- /dev/null +++ b/testbed/gradio-app__gradio/README.md @@ -0,0 +1,323 @@ + + +
+ +[gradio](https://gradio.app)
+Build & share delightful machine learning apps easily + +[![gradio-backend](https://github.com/gradio-app/gradio/actions/workflows/backend.yml/badge.svg)](https://github.com/gradio-app/gradio/actions/workflows/backend.yml) +[![gradio-ui](https://github.com/gradio-app/gradio/actions/workflows/ui.yml/badge.svg)](https://github.com/gradio-app/gradio/actions/workflows/ui.yml) + [![PyPI](https://img.shields.io/pypi/v/gradio)](https://pypi.org/project/gradio/) +[![PyPI downloads](https://img.shields.io/pypi/dm/gradio)](https://pypi.org/project/gradio/) +![Python version](https://img.shields.io/badge/python-3.8+-important) +[![Twitter follow](https://img.shields.io/twitter/follow/gradio?style=social&label=follow)](https://twitter.com/gradio) + +[Website](https://gradio.app) +| [Documentation](https://gradio.app/docs/) +| [Guides](https://gradio.app/guides/) +| [Getting Started](https://gradio.app/getting_started/) +| [Examples](demo/) +| [中文](readme_files/zh-cn#readme) + +
+ +# Gradio: Build Machine Learning Web Apps — in Python + +Gradio is an open-source Python library that is used to build machine learning and data science demos and web applications. + +With Gradio, you can quickly create a beautiful user interface around your machine learning models or data science workflow and let people "try it out" by dragging-and-dropping in their own images, +pasting text, recording their own voice, and interacting with your demo, all through the browser. + +![Interface montage](readme_files/header-image.jpg) + +Gradio is useful for: + +- **Demoing** your machine learning models for clients/collaborators/users/students. + +- **Deploying** your models quickly with automatic shareable links and getting feedback on model performance. + +- **Debugging** your model interactively during development using built-in manipulation and interpretation tools. + +## Quickstart + +**Prerequisite**: Gradio requires Python 3.8 or higher, that's all! + +### What Does Gradio Do? + +One of the _best ways to share_ your machine learning model, API, or data science workflow with others is to create an **interactive app** that allows your users or colleagues to try out the demo in their browsers. + +Gradio allows you to **build demos and share them, all in Python.** And usually in just a few lines of code! So let's get started. + +### Hello, World + +To get Gradio running with a simple "Hello, World" example, follow these three steps: + +1\. Install Gradio using pip: + +```bash +pip install gradio +``` + +2\. Run the code below as a Python script or in a Jupyter Notebook (or [Google Colab](https://colab.research.google.com/drive/18ODkJvyxHutTN0P5APWyGFO_xwNcgHDZ?usp=sharing)): + +```python +import gradio as gr + +def greet(name): + return "Hello " + name + "!" + +demo = gr.Interface(fn=greet, inputs="text", outputs="text") + +demo.launch() +``` + + +We shorten the imported name to `gr` for better readability of code using Gradio. 
This is a widely adopted convention that you should follow so that anyone working with your code can easily understand it. + +3\. The demo below will appear automatically within the Jupyter Notebook, or pop in a browser on [http://localhost:7860](http://localhost:7860) if running from a script: + +![`hello_world` demo](demo/hello_world/screenshot.gif) + +When developing locally, if you want to run the code as a Python script, you can use the Gradio CLI to launch the application **in reload mode**, which will provide seamless and fast development. Learn more about reloading in the [Auto-Reloading Guide](https://gradio.app/developing-faster-with-reload-mode/). + +```bash +gradio app.py +``` + +Note: you can also do `python app.py`, but it won't provide the automatic reload mechanism. + +### The `Interface` Class + +You'll notice that in order to make the demo, we created a `gr.Interface`. This `Interface` class can wrap any Python function with a user interface. In the example above, we saw a simple text-based function, but the function could be anything from music generator to a tax calculator to the prediction function of a pretrained machine learning model. + +The core `Interface` class is initialized with three required parameters: + +- `fn`: the function to wrap a UI around +- `inputs`: which component(s) to use for the input (e.g. `"text"`, `"image"` or `"audio"`) +- `outputs`: which component(s) to use for the output (e.g. `"text"`, `"image"` or `"label"`) + +Let's take a closer look at these components used to provide input and output. + +### Components Attributes + +We saw some simple `Textbox` components in the previous examples, but what if you want to change how the UI components look or behave? + +Let's say you want to customize the input text field — for example, you wanted it to be larger and have a text placeholder. 
If we use the actual class for `Textbox` instead of using the string shortcut, you have access to much more customizability through component attributes. + +```python +import gradio as gr + +def greet(name): + return "Hello " + name + "!" + +demo = gr.Interface( + fn=greet, + inputs=gr.Textbox(lines=2, placeholder="Name Here..."), + outputs="text", +) +demo.launch() +``` + +![`hello_world_2` demo](demo/hello_world_2/screenshot.gif) + +### Multiple Input and Output Components + +Suppose you had a more complex function, with multiple inputs and outputs. In the example below, we define a function that takes a string, boolean, and number, and returns a string and number. Take a look how you pass a list of input and output components. + +```python +import gradio as gr + +def greet(name, is_morning, temperature): + salutation = "Good morning" if is_morning else "Good evening" + greeting = f"{salutation} {name}. It is {temperature} degrees today" + celsius = (temperature - 32) * 5 / 9 + return greeting, round(celsius, 2) + +demo = gr.Interface( + fn=greet, + inputs=["text", "checkbox", gr.Slider(0, 100)], + outputs=["text", "number"], +) +demo.launch() +``` + +![`hello_world_3` demo](demo/hello_world_3/screenshot.gif) + +You simply wrap the components in a list. Each component in the `inputs` list corresponds to one of the parameters of the function, in order. Each component in the `outputs` list corresponds to one of the values returned by the function, again in order. + +### An Image Example + +Gradio supports many types of components, such as `Image`, `DataFrame`, `Video`, or `Label`. Let's try an image-to-image function to get a feel for these! 
+ +```python +import numpy as np +import gradio as gr + +def sepia(input_img): + sepia_filter = np.array([ + [0.393, 0.769, 0.189], + [0.349, 0.686, 0.168], + [0.272, 0.534, 0.131] + ]) + sepia_img = input_img.dot(sepia_filter.T) + sepia_img /= sepia_img.max() + return sepia_img + +demo = gr.Interface(sepia, gr.Image(shape=(200, 200)), "image") +demo.launch() +``` + +![`sepia_filter` demo](demo/sepia_filter/screenshot.gif) + +When using the `Image` component as input, your function will receive a NumPy array with the shape `(height, width, 3)`, where the last dimension represents the RGB values. We'll return an image as well in the form of a NumPy array. + +You can also set the datatype used by the component with the `type=` keyword argument. For example, if you wanted your function to take a file path to an image instead of a NumPy array, the input `Image` component could be written as: + +```python +gr.Image(type="filepath", shape=...) +``` + +Also note that our input `Image` component comes with an edit button 🖉, which allows for cropping and zooming into images. Manipulating images in this way can help reveal biases or hidden flaws in a machine learning model! + +You can read more about the many components and how to use them in the [Gradio docs](https://gradio.app/docs). + +### Chatbots + +Gradio includes a high-level class, `gr.ChatInterface`, which is similar to `gr.Interface`, but is specifically designed for chatbot UIs. The `gr.ChatInterface` class also wraps a function but this function must have a specific signature. The function should take two arguments: `message` and then `history` (the arguments can be named anything, but must be in this order) + +- `message`: a `str` representing the user's input +- `history`: a `list` of `list` representing the conversations up until that point. Each inner list consists of two `str` representing a pair: `[user input, bot response]`. 
+ +Your function should return a single string response, which is the bot's response to the particular user input `message`. + +Other than that, `gr.ChatInterface` has no required parameters (though several are available for customization of the UI). + +Here's a toy example: + +```python +import random +import gradio as gr + +def random_response(message, history): + return random.choice(["Yes", "No"]) + +demo = gr.ChatInterface(random_response) + +demo.launch() +``` + +![`chatinterface_random_response` demo](demo/chatinterface_random_response/screenshot.gif) + +You can [read more about `gr.ChatInterface` here](https://gradio.app/guides/creating-a-chatbot-fast). + +### Blocks: More Flexibility and Control + +Gradio offers two approaches to build apps: + +1\. **Interface** and **ChatInterface**, which provide a high-level abstraction for creating demos that we've been discussing so far. + +2\. **Blocks**, a low-level API for designing web apps with more flexible layouts and data flows. Blocks allows you to do things like feature multiple data flows and demos, control where components appear on the page, handle complex data flows (e.g. outputs can serve as inputs to other functions), and update properties/visibility of components based on user interaction — still all in Python. If this customizability is what you need, try `Blocks` instead! + +### Hello, Blocks + +Let's take a look at a simple example. Note how the API here differs from `Interface`. + +```python +import gradio as gr + +def greet(name): + return "Hello " + name + "!" + +with gr.Blocks() as demo: + name = gr.Textbox(label="Name") + output = gr.Textbox(label="Output Box") + greet_btn = gr.Button("Greet") + greet_btn.click(fn=greet, inputs=name, outputs=output, api_name="greet") + +demo.launch() +``` + +![`hello_blocks` demo](demo/hello_blocks/screenshot.gif) + +Things to note: + +- `Blocks` are made with a `with` clause, and any component created inside this clause is automatically added to the app. 
+- Components appear vertically in the app in the order they are created. (Later we will cover customizing layouts!) +- A `Button` was created, and then a `click` event-listener was added to this button. The API for this should look familiar! Like an `Interface`, the `click` method takes a Python function, input components, and output components. + +### More Complexity + +Here's an app to give you a taste of what's possible with `Blocks`: + +```python +import numpy as np +import gradio as gr + + +def flip_text(x): + return x[::-1] + + +def flip_image(x): + return np.fliplr(x) + + +with gr.Blocks() as demo: + gr.Markdown("Flip text or image files using this demo.") + with gr.Tab("Flip Text"): + text_input = gr.Textbox() + text_output = gr.Textbox() + text_button = gr.Button("Flip") + with gr.Tab("Flip Image"): + with gr.Row(): + image_input = gr.Image() + image_output = gr.Image() + image_button = gr.Button("Flip") + + with gr.Accordion("Open for More!"): + gr.Markdown("Look at me...") + + text_button.click(flip_text, inputs=text_input, outputs=text_output) + image_button.click(flip_image, inputs=image_input, outputs=image_output) + +demo.launch() +``` + +![`blocks_flipper` demo](demo/blocks_flipper/screenshot.gif) + +A lot more going on here! We'll cover how to create complex `Blocks` apps like this in the [building with blocks](https://gradio.app/blocks-and-event-listeners) section for you. + +Congrats, you're now familiar with the basics of Gradio! 🥳 Go to our [next guide](https://gradio.app/key_features) to learn more about the key features of Gradio. + + +## Open Source Stack + +Gradio is built with many wonderful open-source libraries, please support them as well! 
+ +[huggingface](https://huggingface.co) +[python](https://www.python.org) +[fastapi](https://fastapi.tiangolo.com) +[encode](https://www.encode.io) +[svelte](https://svelte.dev) +[vite](https://vitejs.dev) +[pnpm](https://pnpm.io) +[tailwind](https://tailwindcss.com) +[storybook](https://storybook.js.org/) +[chromatic](https://www.chromatic.com/) + +## License + +Gradio is licensed under the Apache License 2.0 found in the [LICENSE](LICENSE) file in the root directory of this repository. + +## Citation + +Also check out the paper _[Gradio: Hassle-Free Sharing and Testing of ML Models in the Wild](https://arxiv.org/abs/1906.02569), ICML HILL 2019_, and please cite it if you use Gradio in your work. + +``` +@article{abid2019gradio, + title = {Gradio: Hassle-Free Sharing and Testing of ML Models in the Wild}, + author = {Abid, Abubakar and Abdalla, Ali and Abid, Ali and Khan, Dawood and Alfozan, Abdulrahman and Zou, James}, + journal = {arXiv preprint arXiv:1906.02569}, + year = {2019}, +} +``` diff --git a/testbed/gradio-app__gradio/client/js/CHANGELOG.md b/testbed/gradio-app__gradio/client/js/CHANGELOG.md new file mode 100644 index 0000000000000000000000000000000000000000..83d1e0f40bcbb85fd29f9a44acde8b65edd9824f --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/CHANGELOG.md @@ -0,0 +1,80 @@ +# @gradio/client + +## 0.4.0 + +### Features + +- [#5682](https://github.com/gradio-app/gradio/pull/5682) [`c57f1b75e`](https://github.com/gradio-app/gradio/commit/c57f1b75e272c76b0af4d6bd0c7f44743ff34f26) - Fix functional tests. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5681](https://github.com/gradio-app/gradio/pull/5681) [`40de3d217`](https://github.com/gradio-app/gradio/commit/40de3d2178b61ebe424b6f6228f94c0c6f679bea) - add query parameters to the `gr.Request` object through the `query_params` attribute. Thanks [@DarhkVoyd](https://github.com/DarhkVoyd)! 
+- [#5653](https://github.com/gradio-app/gradio/pull/5653) [`ea0e00b20`](https://github.com/gradio-app/gradio/commit/ea0e00b207b4b90a10e9d054c4202d4e705a29ba) - Prevent Clients from accessing API endpoints that set `api_name=False`. Thanks [@abidlabs](https://github.com/abidlabs)! + +## 0.3.1 + +### Fixes + +- [#5412](https://github.com/gradio-app/gradio/pull/5412) [`26fef8c7`](https://github.com/gradio-app/gradio/commit/26fef8c7f85a006c7e25cdbed1792df19c512d02) - Skip view_api request in js client when auth enabled. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +## 0.3.0 + +### Features + +- [#5267](https://github.com/gradio-app/gradio/pull/5267) [`119c8343`](https://github.com/gradio-app/gradio/commit/119c834331bfae60d4742c8f20e9cdecdd67e8c2) - Faster reload mode. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +## 0.2.1 + +### Features + +- [#5173](https://github.com/gradio-app/gradio/pull/5173) [`730f0c1d`](https://github.com/gradio-app/gradio/commit/730f0c1d54792eb11359e40c9f2326e8a6e39203) - Ensure gradio client works as expected for functions that return nothing. Thanks [@raymondtri](https://github.com/raymondtri)! + +## 0.2.0 + +### Features + +- [#5133](https://github.com/gradio-app/gradio/pull/5133) [`61129052`](https://github.com/gradio-app/gradio/commit/61129052ed1391a75c825c891d57fa0ad6c09fc8) - Update dependency esbuild to ^0.19.0. Thanks [@renovate](https://github.com/apps/renovate)! +- [#5035](https://github.com/gradio-app/gradio/pull/5035) [`8b4eb8ca`](https://github.com/gradio-app/gradio/commit/8b4eb8cac9ea07bde31b44e2006ca2b7b5f4de36) - JS Client: Fixes cannot read properties of null (reading 'is_file'). Thanks [@raymondtri](https://github.com/raymondtri)! + +### Fixes + +- [#5075](https://github.com/gradio-app/gradio/pull/5075) [`67265a58`](https://github.com/gradio-app/gradio/commit/67265a58027ef1f9e4c0eb849a532f72eaebde48) - Allow supporting >1000 files in `gr.File()` and `gr.UploadButton()`. 
Thanks [@abidlabs](https://github.com/abidlabs)! + +## 0.1.4 + +### Patch Changes + +- [#4717](https://github.com/gradio-app/gradio/pull/4717) [`ab5d1ea0`](https://github.com/gradio-app/gradio/commit/ab5d1ea0de87ed888779b66fd2a705583bd29e02) Thanks [@whitphx](https://github.com/whitphx)! - Fix the package description + +## 0.1.3 + +### Patch Changes + +- [#4357](https://github.com/gradio-app/gradio/pull/4357) [`0dbd8f7f`](https://github.com/gradio-app/gradio/commit/0dbd8f7fee4b4877f783fa7bc493f98bbfc3d01d) Thanks [@pngwn](https://github.com/pngwn)! - Various internal refactors and cleanups. + +## 0.1.2 + +### Patch Changes + +- [#4273](https://github.com/gradio-app/gradio/pull/4273) [`1d0f0a9d`](https://github.com/gradio-app/gradio/commit/1d0f0a9db096552e67eb2197c932342587e9e61e) Thanks [@pngwn](https://github.com/pngwn)! - Ensure websocket error messages are correctly handled. + +- [#4315](https://github.com/gradio-app/gradio/pull/4315) [`b525b122`](https://github.com/gradio-app/gradio/commit/b525b122dd8569bbaf7e06db5b90d622d2e9073d) Thanks [@whitphx](https://github.com/whitphx)! - Refacor types. + +- [#4271](https://github.com/gradio-app/gradio/pull/4271) [`1151c525`](https://github.com/gradio-app/gradio/commit/1151c5253554cb87ebd4a44a8a470ac215ff782b) Thanks [@pngwn](https://github.com/pngwn)! - Ensure the full root path is always respected when making requests to a gradio app server. + +## 0.1.1 + +### Patch Changes + +- [#4201](https://github.com/gradio-app/gradio/pull/4201) [`da5b4ee1`](https://github.com/gradio-app/gradio/commit/da5b4ee11721175858ded96e5710225369097f74) Thanks [@pngwn](https://github.com/pngwn)! - Ensure semiver is bundled so CDN links work correctly. + +- [#4202](https://github.com/gradio-app/gradio/pull/4202) [`a26e9afd`](https://github.com/gradio-app/gradio/commit/a26e9afde319382993e6ddc77cc4e56337a31248) Thanks [@pngwn](https://github.com/pngwn)! 
- Ensure all URLs returned by the client are complete URLs with the correct host instead of an absolute path relative to a server. + +## 0.1.0 + +### Minor Changes + +- [#4185](https://github.com/gradio-app/gradio/pull/4185) [`67239ca9`](https://github.com/gradio-app/gradio/commit/67239ca9b2fe3796853fbf7bf865c9e4b383200d) Thanks [@pngwn](https://github.com/pngwn)! - Update client for initial release + +### Patch Changes + +- [#3692](https://github.com/gradio-app/gradio/pull/3692) [`48e8b113`](https://github.com/gradio-app/gradio/commit/48e8b113f4b55e461d9da4f153bf72aeb4adf0f1) Thanks [@pngwn](https://github.com/pngwn)! - Ensure client works in node, create ESM bundle and generate typescript declaration files. + +- [#3605](https://github.com/gradio-app/gradio/pull/3605) [`ae4277a9`](https://github.com/gradio-app/gradio/commit/ae4277a9a83d49bdadfe523b0739ba988128e73b) Thanks [@pngwn](https://github.com/pngwn)! - Update readme. \ No newline at end of file diff --git a/testbed/gradio-app__gradio/client/js/README.md b/testbed/gradio-app__gradio/client/js/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ddf80eb862ad5d36ec53cdef0f470d2aa67c41da --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/README.md @@ -0,0 +1,339 @@ +## JavaScript Client Library + +A javascript (and typescript) client to call Gradio APIs. + +## Installation + +The Gradio JavaScript client is available on npm as `@gradio/client`. You can install it as below: + +```sh +npm i @gradio/client +``` + +## Usage + +The JavaScript Gradio Client exposes two named imports, `client` and `duplicate`. + +### `client` + +The client function connects to the API of a hosted Gradio space and returns an object that allows you to make calls to that API. 
+
+The simplest example looks like this:
+
+```ts
+import { client } from "@gradio/client";
+
+const app = await client("user/space-name");
+const result = await app.predict("/predict");
+```
+
+This function accepts two arguments: `source` and `options`:
+
+#### `source`
+
+This is the url or name of the gradio app whose API you wish to connect to. This parameter is required and should always be a string. For example:
+
+```ts
+client("user/space-name");
+```
+
+#### `options`
+
+The options object can optionally be passed as a second parameter. This object has two properties, `hf_token` and `status_callback`.
+
+##### `hf_token`
+
+This should be a Hugging Face personal access token and is required if you wish to make calls to a private gradio api. This option is optional and should be a string starting with `"hf_"`.
+
+Example:
+
+```ts
+import { client } from "@gradio/client";
+
+const app = await client("user/space-name", { hf_token: "hf_..." });
+```
+
+##### `status_callback`
+
+This should be a function which will notify you of the status of a space if it is not running. If the gradio API you are connecting to is awake and running or is not hosted on a Hugging Face space then this function will do nothing.
+
+**Additional context**
+
+Applications hosted on Hugging Face spaces can be in a number of different states. As spaces are a GitOps tool and will rebuild when new changes are pushed to the repository, they have various building, running and error states. If a space is not 'running' then the function passed as the `status_callback` will notify you of the current state of the space and the status of the space as it changes. Spaces that are building or sleeping can take longer than usual to respond, so you can use this information to give users feedback about the progress of their action.
+
+```ts
+import { client, type SpaceStatus } from "@gradio/client";
+
+const app = await client("user/space-name", {
+	// The status_callback parameter does not need to be manually annotated, this is just for illustration.
+	status_callback: (space_status: SpaceStatus) => console.log(space_status)
+});
+```
+
+```ts
+interface SpaceStatusNormal {
+	status: "sleeping" | "running" | "building" | "error" | "stopped";
+	detail:
+		| "SLEEPING"
+		| "RUNNING"
+		| "RUNNING_BUILDING"
+		| "BUILDING"
+		| "NOT_FOUND";
+	load_status: "pending" | "error" | "complete" | "generating";
+	message: string;
+}
+
+interface SpaceStatusError {
+	status: "space_error";
+	detail: "NO_APP_FILE" | "CONFIG_ERROR" | "BUILD_ERROR" | "RUNTIME_ERROR";
+	load_status: "error";
+	message: string;
+	discussions_enabled: boolean;
+}
+
+type SpaceStatus = SpaceStatusNormal | SpaceStatusError;
+```
+
+The gradio client returns an object with a number of methods and properties:
+
+#### `predict`
+
+The `predict` method allows you to call an api endpoint and get a prediction result:
+
+```ts
+import { client } from "@gradio/client";
+
+const app = await client("user/space-name");
+const result = await app.predict("/predict");
+```
+
+`predict` accepts two parameters, `endpoint` and `payload`. It returns a promise that resolves to the prediction result.
+
+##### `endpoint`
+
+This is the endpoint for an api request and is required. The default endpoint for a `gradio.Interface` is `"/predict"`. Explicitly named endpoints have a custom name. The endpoint names can be found on the "View API" page of a space.
+
+```ts
+import { client } from "@gradio/client";
+
+const app = await client("user/space-name");
+const result = await app.predict("/predict");
+```
+
+##### `payload`
+
+The `payload` argument is generally optional but this depends on the API itself. If the API endpoint depends on values being passed in then it is required for the API request to succeed.
The data that should be passed in is detailed on the "View API" page of a space, or accessible via the `view_api()` method of the client.
+
+```ts
+import { client } from "@gradio/client";
+
+const app = await client("user/space-name");
+const result = await app.predict("/predict", [1, "Hello", "friends"]);
+```
+
+#### `submit`
+
+The `submit` method provides a more flexible way to call an API endpoint, providing you with status updates about the current progress of the prediction as well as supporting more complex endpoint types.
+
+```ts
+import { client } from "@gradio/client";
+
+const app = await client("user/space-name");
+const submission = app.submit("/predict", payload);
+```
+
+The `submit` method accepts the same [`endpoint`](#endpoint) and [`payload`](#payload) arguments as `predict`.
+
+The `submit` method does not return a promise and should not be awaited, instead it returns an object with `on`, `off`, and `cancel` methods.
+
+##### `on`
+
+The `on` method allows you to subscribe to events related to the submitted API request. There are two types of event that can be subscribed to: `"data"` updates and `"status"` updates.
+
+`"data"` updates are issued when the API computes a value, the callback provided as the second argument will be called when such a value is sent to the client. The shape of the data depends on the way the API itself is constructed. This event may fire more than once if that endpoint supports emitting new values over time.
+
+`"status"` updates are issued when the status of a request changes. This information allows you to offer feedback to users when the queue position of the request changes, or when the request changes from queued to processing.
+
+The status payload looks like this:
+
+```ts
+interface Status {
+	queue: boolean;
+	code?: string;
+	success?: boolean;
+	stage: "pending" | "error" | "complete" | "generating";
+	size?: number;
+	position?: number;
+	eta?: number;
+	message?: string;
+	progress_data?: Array<{
+		progress: number | null;
+		index: number | null;
+		length: number | null;
+		unit: string | null;
+		desc: string | null;
+	}>;
+	time?: Date;
+}
+```
+
+Usage of these subscribe callbacks looks like this:
+
+```ts
+import { client } from "@gradio/client";
+
+const app = await client("user/space-name");
+const submission = app
+	.submit("/predict", payload)
+	.on("data", (data) => console.log(data))
+	.on("status", (status: Status) => console.log(status));
+```
+
+##### `off`
+
+The `off` method unsubscribes from a specific event of the submitted job and works similarly to `document.removeEventListener`; both the event name and the original callback must be passed in to successfully unsubscribe:
+
+```ts
+import { client } from "@gradio/client";
+
+const app = await client("user/space-name");
+const handle_data = (data) => console.log(data);
+
+const submission = app.submit("/predict", payload).on("data", handle_data);
+
+// later
+submission.off("data", handle_data);
+```
+
+##### `destroy`
+
+The `destroy` method will remove all subscriptions to a job, regardless of whether or not they are `"data"` or `"status"` events. This is a convenience method for when you do not want to unsubscribe from each event individually using the `off` method.
+
+```js
+import { client } from "@gradio/client";
+
+const app = await client("user/space-name");
+const handle_data = (data) => console.log(data);
+
+const submission = app.submit("/predict", payload).on("data", handle_data);
+
+// later
+submission.destroy();
+```
+
+##### `cancel`
+
+Certain types of gradio function can run repeatedly and in some cases indefinitely. The `cancel` method will stop such endpoints and prevent the API from issuing additional updates.
+ +```ts +import { client } from "@gradio/client"; + +const app = await client("user/space-name"); +const submission = app + .submit("/predict", payload) + .on("data", (data) => console.log(data)); + +// later + +submission.cancel(); +``` + +#### `view_api` + +The `view_api` method provides details about the API you are connected to. It returns a JavaScript object of all named endpoints, unnamed endpoints and what values they accept and return. This method does not accept arguments. + +```ts +import { client } from "@gradio/client"; + +const app = await client("user/space-name"); +const api_info = await app.view_api(); + +console.log(api_info); +``` + +#### `config` + +The `config` property contains the configuration for the gradio application you are connected to. This object may contain useful meta information about the application. + +```ts +import { client } from "@gradio/client"; + +const app = await client("user/space-name"); +console.log(app.config); +``` + +### `duplicate` + +The duplicate function will attempt to duplicate the space that is referenced and return an instance of `client` connected to that space. If the space has already been duplicated then it will not create a new duplicate and will instead connect to the existing duplicated space. The huggingface token that is passed in will dictate the user under which the space is created. + +`duplicate` accepts the same arguments as `client` with the addition of a `private` options property dictating whether the duplicated space should be private or public. A huggingface token is required for duplication to work. + +```ts +import { duplicate } from "@gradio/client"; + +const app = await duplicate("user/space-name", { + hf_token: "hf_..." +}); +``` + +This function accepts two arguments: `source` and `options`: + +#### `source` + +The space to duplicate and connect to. [See `client`'s `source` parameter](#source). 
+ +#### `options` + +Accepts all options that `client` accepts, except `hf_token` is required. [See `client`'s `options` parameter](#source). + +`duplicate` also accepts one additional `options` property. + +##### `private` + +This is an optional property specific to `duplicate`'s options object and will determine whether the space should be public or private. Spaces duplicated via the `duplicate` method are public by default. + +```ts +import { duplicate } from "@gradio/client"; + +const app = await duplicate("user/space-name", { + hf_token: "hf_...", + private: true +}); +``` + +##### `timeout` + +This is an optional property specific to `duplicate`'s options object and will set the timeout in minutes before the duplicated space will go to sleep. + +```ts +import { duplicate } from "@gradio/client"; + +const app = await duplicate("user/space-name", { + hf_token: "hf_...", + private: true, + timeout: 5 +}); +``` + +##### `hardware` + +This is an optional property specific to `duplicate`'s options object and will set the hardware for the duplicated space. By default the hardware used will match that of the original space. If this cannot be obtained it will default to `"cpu-basic"`. For hardware upgrades (beyond the basic CPU tier), you may be required to provide [billing information on Hugging Face](https://huggingface.co/settings/billing). 
+ +Possible hardware options are: + +- `"cpu-basic"` +- `"cpu-upgrade"` +- `"t4-small"` +- `"t4-medium"` +- `"a10g-small"` +- `"a10g-large"` +- `"a100-large"` + +```ts +import { duplicate } from "@gradio/client"; + +const app = await duplicate("user/space-name", { + hf_token: "hf_...", + private: true, + hardware: "a10g-small" +}); +``` diff --git a/testbed/gradio-app__gradio/client/js/package.json b/testbed/gradio-app__gradio/client/js/package.json new file mode 100644 index 0000000000000000000000000000000000000000..a7c6dab6cbb126d9fa16ef1a5bf4c03e1901ef28 --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/package.json @@ -0,0 +1,33 @@ +{ + "name": "@gradio/client", + "version": "0.4.0", + "description": "Gradio API client", + "type": "module", + "main": "dist/index.js", + "author": "", + "license": "ISC", + "exports": { + ".": { + "import": "./dist/index.js" + }, + "./package.json": "./package.json" + }, + "dependencies": { + "bufferutil": "^4.0.7", + "semiver": "^1.1.0", + "ws": "^8.13.0" + }, + "devDependencies": { + "@types/ws": "^8.5.4", + "esbuild": "^0.19.0" + }, + "scripts": { + "bundle": "vite build --ssr", + "generate_types": "tsc", + "build": "pnpm bundle && pnpm generate_types" + }, + "engines": { + "node": ">=18.0.0" + }, + "main_changeset": true +} diff --git a/testbed/gradio-app__gradio/client/js/src/client.node-test.ts b/testbed/gradio-app__gradio/client/js/src/client.node-test.ts new file mode 100644 index 0000000000000000000000000000000000000000..9964583b4558d55a62b0fbf78a440e572e5a204f --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/src/client.node-test.ts @@ -0,0 +1,172 @@ +import { test, describe, assert } from "vitest"; +import { readFileSync } from "fs"; +import { join, dirname } from "path"; +import { fileURLToPath } from "url"; +import { Blob } from "node:buffer"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const image_path = join( + __dirname, + "..", + "..", + "..", + "demo", + "kitchen_sink", + "files", 
+ "lion.jpg" +); + +import { walk_and_store_blobs, client, handle_blob } from "./client"; + +describe.skip("extract blob parts", () => { + test("convert Buffer to Blob", async () => { + const image = readFileSync(image_path); + await client("gradio/hello_world_main"); + const parts = walk_and_store_blobs({ + data: { + image + } + }); + + assert.isTrue(parts[0].blob instanceof Blob); + }); + + test("leave node Blob as Blob", async () => { + const image = new Blob([readFileSync(image_path)]); + + await client("gradio/hello_world_main"); + const parts = walk_and_store_blobs({ + data: { + image + } + }); + + assert.isTrue(parts[0].blob instanceof Blob); + }); + + test("handle deep structures", async () => { + const image = new Blob([readFileSync(image_path)]); + + await client("gradio/hello_world_main"); + const parts = walk_and_store_blobs({ + a: { + b: { + data: { + image + } + } + } + }); + + assert.isTrue(parts[0].blob instanceof Blob); + }); + + test("handle deep structures with arrays", async () => { + const image = new Blob([readFileSync(image_path)]); + + await client("gradio/hello_world_main"); + const parts = walk_and_store_blobs({ + a: [ + { + b: [ + { + data: [ + { + image + } + ] + } + ] + } + ] + }); + + assert.isTrue(parts[0].blob instanceof Blob); + }); + + test("handle deep structures with arrays 2", async () => { + const image = new Blob([readFileSync(image_path)]); + + await client("gradio/hello_world_main"); + const obj = { + a: [ + { + b: [ + { + data: [[image], image, [image, [image]]] + } + ] + } + ] + }; + const parts = walk_and_store_blobs(obj); + + function map_path( + obj: Record, + parts: { path: string[]; blob: any }[] + ) { + const { path, blob } = parts[parts.length - 1]; + let ref = obj; + path.forEach((p) => (ref = ref[p])); + + return ref === blob; + } + + assert.isTrue(parts[0].blob instanceof Blob); + // assert.isTrue(map_path(obj, parts)); + }); +}); + +describe("handle_blob", () => { + test("handle blobs", async () => { + const 
image = new Blob([readFileSync(image_path)]); + + const app = await client("gradio/hello_world_main"); + const obj = [ + { + a: [ + { + b: [ + { + data: [[image], image, [image, [image]]] + } + ] + } + ] + } + ]; + + const parts = await handle_blob(app.config.root, obj, undefined); + //@ts-ignore + // assert.isString(parts.data[0].a[0].b[0].data[0][0]); + }); +}); + +describe.skip("private space", () => { + test("can access a private space", async () => { + const image = new Blob([readFileSync(image_path)]); + + const app = await client("pngwn/hello_world", { + hf_token: "hf_" + }); + + console.log(app); + const obj = [ + { + a: [ + { + b: [ + { + data: [[image], image, [image, [image]]] + } + ] + } + ] + } + ]; + + const parts = await handle_blob(app.config.root, obj, "hf_"); + //@ts-ignore + assert.isString(parts.data[0].a[0].b[0].data[0][0]); + }); +}); diff --git a/testbed/gradio-app__gradio/client/js/src/client.ts b/testbed/gradio-app__gradio/client/js/src/client.ts new file mode 100644 index 0000000000000000000000000000000000000000..b5e9e899eeb17b9e7c94ccf600b316cb7f3fc894 --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/src/client.ts @@ -0,0 +1,1367 @@ +import semiver from "semiver"; + +import { + process_endpoint, + RE_SPACE_NAME, + map_names_to_ids, + discussions_enabled, + get_space_hardware, + set_space_hardware, + set_space_timeout, + hardware_types +} from "./utils.js"; + +import type { + EventType, + EventListener, + ListenerMap, + Event, + Payload, + PostResponse, + UploadResponse, + Status, + SpaceStatus, + SpaceStatusCallback, + FileData +} from "./types.js"; + +import type { Config } from "./types.js"; + +type event = ( + eventType: K, + listener: EventListener +) => SubmitReturn; +type predict = ( + endpoint: string | number, + data?: unknown[], + event_data?: unknown +) => Promise; + +type client_return = { + predict: predict; + config: Config; + submit: ( + endpoint: string | number, + data?: unknown[], + event_data?: unknown + ) => 
SubmitReturn; + view_api: (c?: Config) => Promise>; +}; + +type SubmitReturn = { + on: event; + off: event; + cancel: () => Promise; + destroy: () => void; +}; + +const QUEUE_FULL_MSG = "This application is too busy. Keep trying!"; +const BROKEN_CONNECTION_MSG = "Connection errored out."; + +export let NodeBlob; + +export async function duplicate( + app_reference: string, + options: { + hf_token: `hf_${string}`; + private?: boolean; + status_callback: SpaceStatusCallback; + hardware?: (typeof hardware_types)[number]; + timeout?: number; + } +): Promise { + const { hf_token, private: _private, hardware, timeout } = options; + + if (hardware && !hardware_types.includes(hardware)) { + throw new Error( + `Invalid hardware type provided. Valid types are: ${hardware_types + .map((v) => `"${v}"`) + .join(",")}.` + ); + } + const headers = { + Authorization: `Bearer ${hf_token}` + }; + + const user = ( + await ( + await fetch(`https://huggingface.co/api/whoami-v2`, { + headers + }) + ).json() + ).name; + + const space_name = app_reference.split("/")[1]; + const body: { + repository: string; + private?: boolean; + } = { + repository: `${user}/${space_name}` + }; + + if (_private) { + body.private = true; + } + + try { + const response = await fetch( + `https://huggingface.co/api/spaces/${app_reference}/duplicate`, + { + method: "POST", + headers: { "Content-Type": "application/json", ...headers }, + body: JSON.stringify(body) + } + ); + + if (response.status === 409) { + return client(`${user}/${space_name}`, options); + } + const duplicated_space = await response.json(); + + let original_hardware; + + if (!hardware) { + original_hardware = await get_space_hardware(app_reference, hf_token); + } + + const requested_hardware = hardware || original_hardware || "cpu-basic"; + await set_space_hardware( + `${user}/${space_name}`, + requested_hardware, + hf_token + ); + + await set_space_timeout(`${user}/${space_name}`, timeout || 300, hf_token); + return 
client(duplicated_space.url, options); + } catch (e: any) { + throw new Error(e); + } +} + +interface Client { + post_data: ( + url: string, + body: unknown, + token?: `hf_${string}` + ) => Promise<[PostResponse, number]>; + upload_files: ( + root: string, + files: File[], + token?: `hf_${string}` + ) => Promise; + client: ( + app_reference: string, + options: { + hf_token?: `hf_${string}`; + status_callback?: SpaceStatusCallback; + normalise_files?: boolean; + } + ) => Promise; + handle_blob: ( + endpoint: string, + data: unknown[], + api_info: ApiInfo, + token?: `hf_${string}` + ) => Promise; +} + +export function api_factory(fetch_implementation: typeof fetch): Client { + return { post_data, upload_files, client, handle_blob }; + + async function post_data( + url: string, + body: unknown, + token?: `hf_${string}` + ): Promise<[PostResponse, number]> { + const headers: { + Authorization?: string; + "Content-Type": "application/json"; + } = { "Content-Type": "application/json" }; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + try { + var response = await fetch_implementation(url, { + method: "POST", + body: JSON.stringify(body), + headers + }); + } catch (e) { + return [{ error: BROKEN_CONNECTION_MSG }, 500]; + } + const output: PostResponse = await response.json(); + return [output, response.status]; + } + + async function upload_files( + root: string, + files: (Blob | File)[], + token?: `hf_${string}` + ): Promise { + const headers: { + Authorization?: string; + } = {}; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + const chunkSize = 1000; + const uploadResponses = []; + for (let i = 0; i < files.length; i += chunkSize) { + const chunk = files.slice(i, i + chunkSize); + const formData = new FormData(); + chunk.forEach((file) => { + formData.append("files", file); + }); + try { + var response = await fetch_implementation(`${root}/upload`, { + method: "POST", + body: formData, + headers + }); + } catch (e) { + return { error: 
BROKEN_CONNECTION_MSG }; + } + const output: UploadResponse["files"] = await response.json(); + uploadResponses.push(...output); + } + return { files: uploadResponses }; + } + + async function client( + app_reference: string, + options: { + hf_token?: `hf_${string}`; + status_callback?: SpaceStatusCallback; + normalise_files?: boolean; + } = { normalise_files: true } + ): Promise { + return new Promise(async (res) => { + const { status_callback, hf_token, normalise_files } = options; + const return_obj = { + predict, + submit, + view_api + // duplicate + }; + + const transform_files = normalise_files ?? true; + if (typeof window === "undefined" || !("WebSocket" in window)) { + const ws = await import("ws"); + NodeBlob = (await import("node:buffer")).Blob; + //@ts-ignore + global.WebSocket = ws.WebSocket; + } + + const { ws_protocol, http_protocol, host, space_id } = + await process_endpoint(app_reference, hf_token); + + const session_hash = Math.random().toString(36).substring(2); + const last_status: Record = {}; + let config: Config; + let api_map: Record = {}; + + let jwt: false | string = false; + + if (hf_token && space_id) { + jwt = await get_jwt(space_id, hf_token); + } + + async function config_success(_config: Config): Promise { + config = _config; + api_map = map_names_to_ids(_config?.dependencies || []); + if (config.auth_required) { + return { + config, + ...return_obj + }; + } + try { + api = await view_api(config); + } catch (e) { + console.error(`Could not get api details: ${e.message}`); + } + + return { + config, + ...return_obj + }; + } + let api: ApiInfo; + async function handle_space_sucess(status: SpaceStatus): Promise { + if (status_callback) status_callback(status); + if (status.status === "running") + try { + config = await resolve_config( + fetch_implementation, + `${http_protocol}//${host}`, + hf_token + ); + + const _config = await config_success(config); + res(_config); + } catch (e) { + console.error(e); + if (status_callback) { + 
status_callback({ + status: "error", + message: "Could not load this space.", + load_status: "error", + detail: "NOT_FOUND" + }); + } + } + } + + try { + config = await resolve_config( + fetch_implementation, + `${http_protocol}//${host}`, + hf_token + ); + + const _config = await config_success(config); + res(_config); + } catch (e) { + console.error(e); + if (space_id) { + check_space_status( + space_id, + RE_SPACE_NAME.test(space_id) ? "space_name" : "subdomain", + handle_space_sucess + ); + } else { + if (status_callback) + status_callback({ + status: "error", + message: "Could not load this space.", + load_status: "error", + detail: "NOT_FOUND" + }); + } + } + + function predict( + endpoint: string, + data: unknown[], + event_data?: unknown + ): Promise { + let data_returned = false; + let status_complete = false; + let dependency; + if (typeof endpoint === "number") { + dependency = config.dependencies[endpoint]; + } else { + const trimmed_endpoint = endpoint.replace(/^\//, ""); + dependency = config.dependencies[api_map[trimmed_endpoint]]; + } + + if (dependency.types.continuous) { + throw new Error( + "Cannot call predict on this function as it may run forever. 
Use submit instead" + ); + } + + return new Promise((res, rej) => { + const app = submit(endpoint, data, event_data); + let result; + + app + .on("data", (d) => { + // if complete message comes before data, resolve here + if (status_complete) { + app.destroy(); + res(d); + } + data_returned = true; + result = d; + }) + .on("status", (status) => { + if (status.stage === "error") rej(status); + if (status.stage === "complete") { + status_complete = true; + app.destroy(); + // if complete message comes after data, resolve here + if (data_returned) { + res(result); + } + } + }); + }); + } + + function submit( + endpoint: string | number, + data: unknown[], + event_data?: unknown + ): SubmitReturn { + let fn_index: number; + let api_info; + + if (typeof endpoint === "number") { + fn_index = endpoint; + api_info = api.unnamed_endpoints[fn_index]; + } else { + const trimmed_endpoint = endpoint.replace(/^\//, ""); + + fn_index = api_map[trimmed_endpoint]; + api_info = api.named_endpoints[endpoint.trim()]; + } + + if (typeof fn_index !== "number") { + throw new Error( + "There is no endpoint matching that name of fn_index matching that number." + ); + } + + let websocket: WebSocket; + + const _endpoint = typeof endpoint === "number" ? "/predict" : endpoint; + let payload: Payload; + let complete: false | Record = false; + const listener_map: ListenerMap = {}; + const url_params = new URLSearchParams( + window.location.search + ).toString(); + + handle_blob( + `${http_protocol}//${host + config.path}`, + data, + api_info, + hf_token + ).then((_payload) => { + payload = { data: _payload || [], event_data, fn_index }; + if (skip_queue(fn_index, config)) { + fire_event({ + type: "status", + endpoint: _endpoint, + stage: "pending", + queue: false, + fn_index, + time: new Date() + }); + + post_data( + `${http_protocol}//${host + config.path}/run${ + _endpoint.startsWith("/") ? _endpoint : `/${_endpoint}` + }${url_params ? "?" 
+ url_params : ""}`, + { + ...payload, + session_hash + }, + hf_token + ) + .then(([output, status_code]) => { + const data = transform_files + ? transform_output( + output.data, + api_info, + config.root, + config.root_url + ) + : output.data; + if (status_code == 200) { + fire_event({ + type: "data", + endpoint: _endpoint, + fn_index, + data: data, + time: new Date() + }); + + fire_event({ + type: "status", + endpoint: _endpoint, + fn_index, + stage: "complete", + eta: output.average_duration, + queue: false, + time: new Date() + }); + } else { + fire_event({ + type: "status", + stage: "error", + endpoint: _endpoint, + fn_index, + message: output.error, + queue: false, + time: new Date() + }); + } + }) + .catch((e) => { + fire_event({ + type: "status", + stage: "error", + message: e.message, + endpoint: _endpoint, + fn_index, + queue: false, + time: new Date() + }); + }); + } else { + fire_event({ + type: "status", + stage: "pending", + queue: true, + endpoint: _endpoint, + fn_index, + time: new Date() + }); + + let url = new URL(`${ws_protocol}://${host}${config.path} + /queue/join${url_params ? "?" 
+ url_params : ""}`); + + if (jwt) { + url.searchParams.set("__sign", jwt); + } + + websocket = new WebSocket(url); + + websocket.onclose = (evt) => { + if (!evt.wasClean) { + fire_event({ + type: "status", + stage: "error", + broken: true, + message: BROKEN_CONNECTION_MSG, + queue: true, + endpoint: _endpoint, + fn_index, + time: new Date() + }); + } + }; + + websocket.onmessage = function (event) { + const _data = JSON.parse(event.data); + const { type, status, data } = handle_message( + _data, + last_status[fn_index] + ); + + if (type === "update" && status && !complete) { + // call 'status' listeners + fire_event({ + type: "status", + endpoint: _endpoint, + fn_index, + time: new Date(), + ...status + }); + if (status.stage === "error") { + websocket.close(); + } + } else if (type === "hash") { + websocket.send(JSON.stringify({ fn_index, session_hash })); + return; + } else if (type === "data") { + websocket.send(JSON.stringify({ ...payload, session_hash })); + } else if (type === "complete") { + complete = status; + } else if (type === "log") { + fire_event({ + type: "log", + log: data.log, + level: data.level, + endpoint: _endpoint, + fn_index + }); + } else if (type === "generating") { + fire_event({ + type: "status", + time: new Date(), + ...status, + stage: status?.stage!, + queue: true, + endpoint: _endpoint, + fn_index + }); + } + if (data) { + fire_event({ + type: "data", + time: new Date(), + data: transform_files + ? 
transform_output( + data.data, + api_info, + config.root, + config.root_url + ) + : data.data, + endpoint: _endpoint, + fn_index + }); + + if (complete) { + fire_event({ + type: "status", + time: new Date(), + ...complete, + stage: status?.stage!, + queue: true, + endpoint: _endpoint, + fn_index + }); + websocket.close(); + } + } + }; + + // different ws contract for gradio versions older than 3.6.0 + //@ts-ignore + if (semiver(config.version || "2.0.0", "3.6") < 0) { + addEventListener("open", () => + websocket.send(JSON.stringify({ hash: session_hash })) + ); + } + } + }); + + function fire_event(event: Event): void { + const narrowed_listener_map: ListenerMap = listener_map; + const listeners = narrowed_listener_map[event.type] || []; + listeners?.forEach((l) => l(event)); + } + + function on( + eventType: K, + listener: EventListener + ): SubmitReturn { + const narrowed_listener_map: ListenerMap = listener_map; + const listeners = narrowed_listener_map[eventType] || []; + narrowed_listener_map[eventType] = listeners; + listeners?.push(listener); + + return { on, off, cancel, destroy }; + } + + function off( + eventType: K, + listener: EventListener + ): SubmitReturn { + const narrowed_listener_map: ListenerMap = listener_map; + let listeners = narrowed_listener_map[eventType] || []; + listeners = listeners?.filter((l) => l !== listener); + narrowed_listener_map[eventType] = listeners; + + return { on, off, cancel, destroy }; + } + + async function cancel(): Promise { + const _status: Status = { + stage: "complete", + queue: false, + time: new Date() + }; + complete = _status; + fire_event({ + ..._status, + type: "status", + endpoint: _endpoint, + fn_index: fn_index + }); + + if (websocket && websocket.readyState === 0) { + websocket.addEventListener("open", () => { + websocket.close(); + }); + } else { + websocket.close(); + } + + try { + await fetch_implementation( + `${http_protocol}//${host + config.path}/reset`, + { + headers: { "Content-Type": 
"application/json" }, + method: "POST", + body: JSON.stringify({ fn_index, session_hash }) + } + ); + } catch (e) { + console.warn( + "The `/reset` endpoint could not be called. Subsequent endpoint results may be unreliable." + ); + } + } + + function destroy(): void { + for (const event_type in listener_map) { + listener_map[event_type as "data" | "status"].forEach((fn) => { + off(event_type as "data" | "status", fn); + }); + } + } + + return { + on, + off, + cancel, + destroy + }; + } + + async function view_api(config?: Config): Promise> { + if (api) return api; + + const headers: { + Authorization?: string; + "Content-Type": "application/json"; + } = { "Content-Type": "application/json" }; + if (hf_token) { + headers.Authorization = `Bearer ${hf_token}`; + } + let response: Response; + // @ts-ignore + if (semiver(config.version || "2.0.0", "3.30") < 0) { + response = await fetch_implementation( + "https://gradio-space-api-fetcher-v2.hf.space/api", + { + method: "POST", + body: JSON.stringify({ + serialize: false, + config: JSON.stringify(config) + }), + headers + } + ); + } else { + response = await fetch_implementation(`${config.root}/info`, { + headers + }); + } + + if (!response.ok) { + throw new Error(BROKEN_CONNECTION_MSG); + } + + let api_info = (await response.json()) as + | ApiInfo + | { api: ApiInfo }; + if ("api" in api_info) { + api_info = api_info.api; + } + + if ( + api_info.named_endpoints["/predict"] && + !api_info.unnamed_endpoints["0"] + ) { + api_info.unnamed_endpoints[0] = api_info.named_endpoints["/predict"]; + } + + const x = transform_api_info(api_info, config, api_map); + return x; + } + }); + } + + async function handle_blob( + endpoint: string, + data: unknown[], + api_info: ApiInfo, + token?: `hf_${string}` + ): Promise { + const blob_refs = await walk_and_store_blobs( + data, + undefined, + [], + true, + api_info + ); + + return Promise.all( + blob_refs.map(async ({ path, blob, data, type }) => { + if (blob) { + const file_url = 
(await upload_files(endpoint, [blob], token)) + .files[0]; + return { path, file_url, type }; + } + return { path, base64: data, type }; + }) + ).then((r) => { + r.forEach(({ path, file_url, base64, type }) => { + if (base64) { + update_object(data, base64, path); + } else if (type === "Gallery") { + update_object(data, file_url, path); + } else if (file_url) { + const o = { + is_file: true, + name: `${file_url}`, + data: null + // orig_name: "file.csv" + }; + update_object(data, o, path); + } + }); + + return data; + }); + } +} + +export const { post_data, upload_files, client, handle_blob } = + api_factory(fetch); + +function transform_output( + data: any[], + api_info: any, + root_url: string, + remote_url?: string +): unknown[] { + return data.map((d, i) => { + if (api_info?.returns?.[i]?.component === "File") { + return normalise_file(d, root_url, remote_url); + } else if (api_info?.returns?.[i]?.component === "Gallery") { + return d.map((img) => { + return Array.isArray(img) + ? [normalise_file(img[0], root_url, remote_url), img[1]] + : [normalise_file(img, root_url, remote_url), null]; + }); + } else if (typeof d === "object" && d?.is_file) { + return normalise_file(d, root_url, remote_url); + } + return d; + }); +} + +function normalise_file( + file: FileData[], + root: string, + root_url: string | null +): FileData[]; +function normalise_file( + file: FileData | string, + root: string, + root_url: string | null +): FileData; +function normalise_file( + file: null, + root: string, + root_url: string | null +): null; +function normalise_file(file, root, root_url): FileData[] | FileData | null { + if (file == null) return null; + if (typeof file === "string") { + return { + name: "file_data", + data: file + }; + } else if (Array.isArray(file)) { + const normalized_file: (FileData | null)[] = []; + + for (const x of file) { + if (x === null) { + normalized_file.push(null); + } else { + normalized_file.push(normalise_file(x, root, root_url)); + } + } + + return 
normalized_file as FileData[]; + } else if (file.is_file) { + if (!root_url) { + file.data = root + "/file=" + file.name; + } else { + file.data = "/proxy=" + root_url + "file=" + file.name; + } + } + return file; +} + +interface ApiData { + label: string; + type: { + type: any; + description: string; + }; + component: string; + example_input?: any; +} + +interface JsApiData { + label: string; + type: string; + component: string; + example_input: any; +} + +interface EndpointInfo { + parameters: T[]; + returns: T[]; +} +interface ApiInfo { + named_endpoints: { + [key: string]: EndpointInfo; + }; + unnamed_endpoints: { + [key: string]: EndpointInfo; + }; +} + +function get_type( + type: { [key: string]: any }, + component: string, + serializer: string, + signature_type: "return" | "parameter" +): string { + switch (type.type) { + case "string": + return "string"; + case "boolean": + return "boolean"; + case "number": + return "number"; + } + + if ( + serializer === "JSONSerializable" || + serializer === "StringSerializable" + ) { + return "any"; + } else if (serializer === "ListStringSerializable") { + return "string[]"; + } else if (component === "Image") { + return signature_type === "parameter" ? "Blob | File | Buffer" : "string"; + } else if (serializer === "FileSerializable") { + if (type?.type === "array") { + return signature_type === "parameter" + ? "(Blob | File | Buffer)[]" + : `{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}[]`; + } + return signature_type === "parameter" + ? "Blob | File | Buffer" + : `{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}`; + } else if (serializer === "GallerySerializable") { + return signature_type === "parameter" + ? 
"[(Blob | File | Buffer), (string | null)][]" + : `[{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}, (string | null))][]`; + } +} + +function get_description( + type: { type: any; description: string }, + serializer: string +): string { + if (serializer === "GallerySerializable") { + return "array of [file, label] tuples"; + } else if (serializer === "ListStringSerializable") { + return "array of strings"; + } else if (serializer === "FileSerializable") { + return "array of files or single file"; + } + return type.description; +} + +function transform_api_info( + api_info: ApiInfo, + config: Config, + api_map: Record +): ApiInfo { + const new_data = { + named_endpoints: {}, + unnamed_endpoints: {} + }; + for (const key in api_info) { + const cat = api_info[key]; + + for (const endpoint in cat) { + const dep_index = config.dependencies[endpoint] + ? endpoint + : api_map[endpoint.replace("/", "")]; + + const info = cat[endpoint]; + new_data[key][endpoint] = {}; + new_data[key][endpoint].parameters = {}; + new_data[key][endpoint].returns = {}; + new_data[key][endpoint].type = config.dependencies[dep_index].types; + new_data[key][endpoint].parameters = info.parameters.map( + ({ label, component, type, serializer }) => ({ + label, + component, + type: get_type(type, component, serializer, "parameter"), + description: get_description(type, serializer) + }) + ); + + new_data[key][endpoint].returns = info.returns.map( + ({ label, component, type, serializer }) => ({ + label, + component, + type: get_type(type, component, serializer, "return"), + description: get_description(type, serializer) + }) + ); + } + } + + return new_data; +} + +async function get_jwt( + space: string, + token: `hf_${string}` +): Promise { + try { + const r = await fetch(`https://huggingface.co/api/spaces/${space}/jwt`, { + headers: { + Authorization: `Bearer ${token}` + } + }); + + const jwt = (await r.json()).token; + + return jwt || false; + } catch (e) { + 
console.error(e); + return false; + } +} + +function update_object(object, newValue, stack): void { + while (stack.length > 1) { + object = object[stack.shift()]; + } + + object[stack.shift()] = newValue; +} + +export async function walk_and_store_blobs( + param, + type = undefined, + path = [], + root = false, + api_info = undefined +): Promise< + { + path: string[]; + data: string | false; + type: string; + blob: Blob | false; + }[] +> { + if (Array.isArray(param)) { + let blob_refs = []; + + await Promise.all( + param.map(async (v, i) => { + let new_path = path.slice(); + new_path.push(i); + + const array_refs = await walk_and_store_blobs( + param[i], + root ? api_info?.parameters[i]?.component || undefined : type, + new_path, + false, + api_info + ); + + blob_refs = blob_refs.concat(array_refs); + }) + ); + + return blob_refs; + } else if (globalThis.Buffer && param instanceof globalThis.Buffer) { + const is_image = type === "Image"; + return [ + { + path: path, + blob: is_image ? false : new NodeBlob([param]), + data: is_image ? 
`${param.toString("base64")}` : false, + type + } + ]; + } else if ( + param instanceof Blob || + (typeof window !== "undefined" && param instanceof File) + ) { + if (type === "Image") { + let data; + + if (typeof window !== "undefined") { + // browser + data = await image_to_data_uri(param); + } else { + const buffer = await param.arrayBuffer(); + data = Buffer.from(buffer).toString("base64"); + } + + return [{ path, data, type, blob: false }]; + } + return [{ path: path, blob: param, type, data: false }]; + } else if (typeof param === "object") { + let blob_refs = []; + for (let key in param) { + if (param.hasOwnProperty(key)) { + let new_path = path.slice(); + new_path.push(key); + blob_refs = blob_refs.concat( + await walk_and_store_blobs( + param[key], + undefined, + new_path, + false, + api_info + ) + ); + } + } + return blob_refs; + } + return []; +} + +function image_to_data_uri(blob: Blob): Promise { + return new Promise((resolve, _) => { + const reader = new FileReader(); + reader.onloadend = () => resolve(reader.result); + reader.readAsDataURL(blob); + }); +} + +function skip_queue(id: number, config: Config): boolean { + return ( + !(config?.dependencies?.[id]?.queue === null + ? 
config.enable_queue + : config?.dependencies?.[id]?.queue) || false + ); +} + +async function resolve_config( + fetch_implementation: typeof fetch, + endpoint?: string, + token?: `hf_${string}` +): Promise { + const headers: { Authorization?: string } = {}; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + if ( + typeof window !== "undefined" && + window.gradio_config && + location.origin !== "http://localhost:9876" && + !window.gradio_config.dev_mode + ) { + const path = window.gradio_config.root; + const config = window.gradio_config; + config.root = endpoint + config.root; + return { ...config, path: path }; + } else if (endpoint) { + let response = await fetch_implementation(`${endpoint}/config`, { + headers + }); + + if (response.status === 200) { + const config = await response.json(); + config.path = config.path ?? ""; + config.root = endpoint; + return config; + } + throw new Error("Could not get config."); + } + + throw new Error("No config or app endpoint found"); +} + +async function check_space_status( + id: string, + type: "subdomain" | "space_name", + status_callback: SpaceStatusCallback +): Promise { + let endpoint = + type === "subdomain" + ? `https://huggingface.co/api/spaces/by-subdomain/${id}` + : `https://huggingface.co/api/spaces/${id}`; + let response; + let _status; + try { + response = await fetch(endpoint); + _status = response.status; + if (_status !== 200) { + throw new Error(); + } + response = await response.json(); + } catch (e) { + status_callback({ + status: "error", + load_status: "error", + message: "Could not get space status", + detail: "NOT_FOUND" + }); + return; + } + + if (!response || _status !== 200) return; + const { + runtime: { stage }, + id: space_name + } = response; + + switch (stage) { + case "STOPPED": + case "SLEEPING": + status_callback({ + status: "sleeping", + load_status: "pending", + message: "Space is asleep. 
Waking it up...", + detail: stage + }); + + setTimeout(() => { + check_space_status(id, type, status_callback); + }, 1000); // poll for status + break; + case "PAUSED": + status_callback({ + status: "paused", + load_status: "error", + message: + "This space has been paused by the author. If you would like to try this demo, consider duplicating the space.", + detail: stage, + discussions_enabled: await discussions_enabled(space_name) + }); + break; + case "RUNNING": + case "RUNNING_BUILDING": + status_callback({ + status: "running", + load_status: "complete", + message: "", + detail: stage + }); + // load_config(source); + // launch + break; + case "BUILDING": + status_callback({ + status: "building", + load_status: "pending", + message: "Space is building...", + detail: stage + }); + + setTimeout(() => { + check_space_status(id, type, status_callback); + }, 1000); + break; + default: + status_callback({ + status: "space_error", + load_status: "error", + message: "This space is experiencing an issue.", + detail: stage, + discussions_enabled: await discussions_enabled(space_name) + }); + break; + } +} + +function handle_message( + data: any, + last_status: Status["stage"] +): { + type: "hash" | "data" | "update" | "complete" | "generating" | "log" | "none"; + data?: any; + status?: Status; +} { + const queue = true; + switch (data.msg) { + case "send_data": + return { type: "data" }; + case "send_hash": + return { type: "hash" }; + case "queue_full": + return { + type: "update", + status: { + queue, + message: QUEUE_FULL_MSG, + stage: "error", + code: data.code, + success: data.success + } + }; + case "estimation": + return { + type: "update", + status: { + queue, + stage: last_status || "pending", + code: data.code, + size: data.queue_size, + position: data.rank, + eta: data.rank_eta, + success: data.success + } + }; + case "progress": + return { + type: "update", + status: { + queue, + stage: "pending", + code: data.code, + progress_data: data.progress_data, + 
success: data.success + } + }; + case "log": + return { type: "log", data: data }; + case "process_generating": + return { + type: "generating", + status: { + queue, + message: !data.success ? data.output.error : null, + stage: data.success ? "generating" : "error", + code: data.code, + progress_data: data.progress_data, + eta: data.average_duration + }, + data: data.success ? data.output : null + }; + case "process_completed": + if ("error" in data.output) { + return { + type: "update", + status: { + queue, + message: data.output.error as string, + stage: "error", + code: data.code, + success: data.success + } + }; + } + return { + type: "complete", + status: { + queue, + message: !data.success ? data.output.error : undefined, + stage: data.success ? "complete" : "error", + code: data.code, + progress_data: data.progress_data, + eta: data.output.average_duration + }, + data: data.success ? data.output : null + }; + + case "process_starts": + return { + type: "update", + status: { + queue, + stage: "pending", + code: data.code, + size: data.rank, + position: 0, + success: data.success + } + }; + } + + return { type: "none", status: { stage: "error", queue } }; +} diff --git a/testbed/gradio-app__gradio/client/js/src/globals.d.ts b/testbed/gradio-app__gradio/client/js/src/globals.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..64966293360c00b9b6c18a347259650b92c93b91 --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/src/globals.d.ts @@ -0,0 +1,29 @@ +declare global { + interface Window { + __gradio_mode__: "app" | "website"; + gradio_config: Config; + __is_colab__: boolean; + __gradio_space__: string | null; + } +} + +export interface Config { + auth_required: boolean | undefined; + auth_message: string; + components: any[]; + css: string | null; + dependencies: any[]; + dev_mode: boolean; + enable_queue: boolean; + layout: any; + mode: "blocks" | "interface"; + root: string; + theme: string; + title: string; + version: string; + 
space_id: string | null; + is_colab: boolean; + show_api: boolean; + stylesheets: string[]; + path: string; +} diff --git a/testbed/gradio-app__gradio/client/js/src/index.ts b/testbed/gradio-app__gradio/client/js/src/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..66cff25fc76d1adfe586de27c4cddb98460c40d1 --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/src/index.ts @@ -0,0 +1,8 @@ +export { + client, + post_data, + upload_files, + duplicate, + api_factory +} from "./client.js"; +export type { SpaceStatus } from "./types.js"; diff --git a/testbed/gradio-app__gradio/client/js/src/types.ts b/testbed/gradio-app__gradio/client/js/src/types.ts new file mode 100644 index 0000000000000000000000000000000000000000..d77c22869f19d34a9d09da5b5cf3ee752665b015 --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/src/types.ts @@ -0,0 +1,116 @@ +export interface Config { + auth_required: boolean | undefined; + auth_message: string; + components: any[]; + css: string | null; + dependencies: any[]; + dev_mode: boolean; + enable_queue: boolean; + layout: any; + mode: "blocks" | "interface"; + root: string; + root_url?: string; + theme: string; + title: string; + version: string; + space_id: string | null; + is_colab: boolean; + show_api: boolean; + stylesheets: string[]; + path: string; +} + +export interface Payload { + data: unknown[]; + fn_index?: number; + event_data?: unknown; + time?: Date; +} + +export interface PostResponse { + error?: string; + [x: string]: any; +} +export interface UploadResponse { + error?: string; + files?: string[]; +} + +export interface Status { + queue: boolean; + code?: string; + success?: boolean; + stage: "pending" | "error" | "complete" | "generating"; + broken?: boolean; + size?: number; + position?: number; + eta?: number; + message?: string; + progress_data?: { + progress: number | null; + index: number | null; + length: number | null; + unit: string | null; + desc: string | null; + }[]; + time?: Date; 
+} + +export interface LogMessage { + log: string; + level: "warning" | "info"; +} + +export interface SpaceStatusNormal { + status: "sleeping" | "running" | "building" | "error" | "stopped"; + detail: + | "SLEEPING" + | "RUNNING" + | "RUNNING_BUILDING" + | "BUILDING" + | "NOT_FOUND"; + load_status: "pending" | "error" | "complete" | "generating"; + message: string; +} +export interface SpaceStatusError { + status: "space_error" | "paused"; + detail: + | "NO_APP_FILE" + | "CONFIG_ERROR" + | "BUILD_ERROR" + | "RUNTIME_ERROR" + | "PAUSED"; + load_status: "error"; + message: string; + discussions_enabled: boolean; +} +export type SpaceStatus = SpaceStatusNormal | SpaceStatusError; + +export type status_callback_function = (a: Status) => void; +export type SpaceStatusCallback = (a: SpaceStatus) => void; + +export type EventType = "data" | "status" | "log"; + +export interface EventMap { + data: Payload; + status: Status; + log: LogMessage; +} + +export type Event = { + [P in K]: EventMap[P] & { type: P; endpoint: string; fn_index: number }; +}[K]; +export type EventListener = (event: Event) => void; +export type ListenerMap = { + [P in K]?: EventListener[]; +}; +export interface FileData { + name: string; + orig_name?: string; + size?: number; + data: string; + blob?: File; + is_file?: boolean; + mime_type?: string; + alt_text?: string; +} diff --git a/testbed/gradio-app__gradio/client/js/src/utils.ts b/testbed/gradio-app__gradio/client/js/src/utils.ts new file mode 100644 index 0000000000000000000000000000000000000000..605dc291461f238df14935e97c6c528d51d133b6 --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/src/utils.ts @@ -0,0 +1,212 @@ +import type { Config } from "./types.js"; + +export function determine_protocol(endpoint: string): { + ws_protocol: "ws" | "wss"; + http_protocol: "http:" | "https:"; + host: string; +} { + if (endpoint.startsWith("http")) { + const { protocol, host } = new URL(endpoint); + + if (host.endsWith("hf.space")) { + return { + 
ws_protocol: "wss", + host: host, + http_protocol: protocol as "http:" | "https:" + }; + } + return { + ws_protocol: protocol === "https:" ? "wss" : "ws", + http_protocol: protocol as "http:" | "https:", + host + }; + } + + // default to secure if no protocol is provided + return { + ws_protocol: "wss", + http_protocol: "https:", + host: endpoint + }; +} + +export const RE_SPACE_NAME = /^[^\/]*\/[^\/]*$/; +export const RE_SPACE_DOMAIN = /.*hf\.space\/{0,1}$/; +export async function process_endpoint( + app_reference: string, + token?: `hf_${string}` +): Promise<{ + space_id: string | false; + host: string; + ws_protocol: "ws" | "wss"; + http_protocol: "http:" | "https:"; +}> { + const headers: { Authorization?: string } = {}; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + + const _app_reference = app_reference.trim(); + + if (RE_SPACE_NAME.test(_app_reference)) { + try { + const res = await fetch( + `https://huggingface.co/api/spaces/${_app_reference}/host`, + { headers } + ); + + if (res.status !== 200) + throw new Error("Space metadata could not be loaded."); + const _host = (await res.json()).host; + + return { + space_id: app_reference, + ...determine_protocol(_host) + }; + } catch (e) { + throw new Error("Space metadata could not be loaded." 
+ e.message); + } + } + + if (RE_SPACE_DOMAIN.test(_app_reference)) { + const { ws_protocol, http_protocol, host } = + determine_protocol(_app_reference); + + return { + space_id: host.replace(".hf.space", ""), + ws_protocol, + http_protocol, + host + }; + } + + return { + space_id: false, + ...determine_protocol(_app_reference) + }; +} + +export function map_names_to_ids( + fns: Config["dependencies"] +): Record<string, number> { + let apis: Record<string, number> = {}; + + fns.forEach(({ api_name }, i) => { + if (api_name) apis[api_name] = i; + }); + + return apis; +} + +const RE_DISABLED_DISCUSSION = + /^(?=[^]*\b[dD]iscussions{0,1}\b)(?=[^]*\b[dD]isabled\b)[^]*$/; +export async function discussions_enabled(space_id: string): Promise<boolean> { + try { + const r = await fetch( + `https://huggingface.co/api/spaces/${space_id}/discussions`, + { + method: "HEAD" + } + ); + const error = r.headers.get("x-error-message"); + + if (error && RE_DISABLED_DISCUSSION.test(error)) return false; + return true; + } catch (e) { + return false; + } +} + +export async function get_space_hardware( + space_id: string, + token: `hf_${string}` +): Promise<(typeof hardware_types)[number]> { + const headers: { Authorization?: string } = {}; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + + try { + const res = await fetch( + `https://huggingface.co/api/spaces/${space_id}/runtime`, + { headers } + ); + + if (res.status !== 200) + throw new Error("Space hardware could not be obtained."); + + const { hardware } = await res.json(); + + return hardware; + } catch (e) { + throw new Error(e.message); + } } + +export async function set_space_hardware( + space_id: string, + new_hardware: (typeof hardware_types)[number], + token: `hf_${string}` +): Promise<(typeof hardware_types)[number]> { + const headers: { Authorization?: string } = {}; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + + try { + const res = await fetch( + `https://huggingface.co/api/spaces/${space_id}/hardware`, + { headers, body: 
JSON.stringify(new_hardware) } + ); + + if (res.status !== 200) + throw new Error( + "Space hardware could not be set. Please ensure the space hardware provided is valid and that a Hugging Face token is passed in." + ); + + const { hardware } = await res.json(); + + return hardware; + } catch (e) { + throw new Error(e.message); + } +} + +export async function set_space_timeout( + space_id: string, + timeout: number, + token: `hf_${string}` +): Promise { + const headers: { Authorization?: string } = {}; + if (token) { + headers.Authorization = `Bearer ${token}`; + } + + try { + const res = await fetch( + `https://huggingface.co/api/spaces/${space_id}/hardware`, + { headers, body: JSON.stringify({ seconds: timeout }) } + ); + + if (res.status !== 200) + throw new Error( + "Space hardware could not be set. Please ensure the space hardware provided is valid and that a Hugging Face token is passed in." + ); + + const { hardware } = await res.json(); + + return hardware; + } catch (e) { + throw new Error(e.message); + } +} + +export const hardware_types = [ + "cpu-basic", + "cpu-upgrade", + "t4-small", + "t4-medium", + "a10g-small", + "a10g-large", + "a100-large" +] as const; diff --git a/testbed/gradio-app__gradio/client/js/tsconfig.json b/testbed/gradio-app__gradio/client/js/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..226efa095213f4aa804f8e467e6fdab5c2504621 --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/tsconfig.json @@ -0,0 +1,14 @@ +{ + "include": ["src/**/*"], + "exclude": ["src/**/*.test.ts", "src/**/*.node-test.ts"], + "compilerOptions": { + "allowJs": true, + "declaration": true, + "emitDeclarationOnly": true, + "outDir": "dist", + "declarationMap": true, + "module": "es2020", + "moduleResolution": "node16", + "skipDefaultLibCheck": true + } +} diff --git a/testbed/gradio-app__gradio/client/js/vite.config.js b/testbed/gradio-app__gradio/client/js/vite.config.js new file mode 100644 index 
0000000000000000000000000000000000000000..5edaed279e65b50387c404a951ab8e2b0a7d83fa --- /dev/null +++ b/testbed/gradio-app__gradio/client/js/vite.config.js @@ -0,0 +1,23 @@ +import { defineConfig } from "vite"; + +export default defineConfig({ + build: { + // minify: true, + lib: { + entry: "src/index.ts", + formats: ["es"] + }, + rollupOptions: { + input: "src/index.ts", + output: { + dir: "dist" + } + } + }, + + ssr: { + target: "node", + format: "esm", + noExternal: ["ws", "semiver"] + } +}); diff --git a/testbed/gradio-app__gradio/client/python/CHANGELOG.md b/testbed/gradio-app__gradio/client/python/CHANGELOG.md new file mode 100644 index 0000000000000000000000000000000000000000..6fd91a865825cc16658d165fbdb0c9b9026b1c4e --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/CHANGELOG.md @@ -0,0 +1,402 @@ +# gradio_client + +## 0.5.2 + +### Features + +- [#5653](https://github.com/gradio-app/gradio/pull/5653) [`ea0e00b20`](https://github.com/gradio-app/gradio/commit/ea0e00b207b4b90a10e9d054c4202d4e705a29ba) - Prevent Clients from accessing API endpoints that set `api_name=False`. Thanks [@abidlabs](https://github.com/abidlabs)! + +## 0.5.1 + +### Features + +- [#5514](https://github.com/gradio-app/gradio/pull/5514) [`52f783175`](https://github.com/gradio-app/gradio/commit/52f7831751b432411e109bd41add4ab286023a8e) - refactor: Use package.json for version management. Thanks [@DarhkVoyd](https://github.com/DarhkVoyd)! + +## 0.5.0 + +### Highlights + +#### Enable streaming audio in python client ([#5248](https://github.com/gradio-app/gradio/pull/5248) [`390624d8`](https://github.com/gradio-app/gradio/commit/390624d8ad2b1308a5bf8384435fd0db98d8e29e)) + +The `gradio_client` now supports streaming file outputs 🌊 + +No new syntax! Connect to a gradio demo that supports streaming file outputs and call `predict` or `submit` as you normally would. 
+ +```python +import gradio_client as grc +client = grc.Client("gradio/stream_audio_out") + +# Get the entire generated audio as a local file +client.predict("/Users/freddy/Pictures/bark_demo.mp4", api_name="/predict") + +job = client.submit("/Users/freddy/Pictures/bark_demo.mp4", api_name="/predict") + +# Get the entire generated audio as a local file +job.result() + +# Each individual chunk +job.outputs() +``` + + Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +### Fixes + +- [#5295](https://github.com/gradio-app/gradio/pull/5295) [`7b8fa8aa`](https://github.com/gradio-app/gradio/commit/7b8fa8aa58f95f5046b9add64b40368bd3f1b700) - Allow caching examples with streamed output. Thanks [@aliabid94](https://github.com/aliabid94)! + +## 0.4.0 + +### Highlights + +#### Client.predict will now return the final output for streaming endpoints ([#5057](https://github.com/gradio-app/gradio/pull/5057) [`35856f8b`](https://github.com/gradio-app/gradio/commit/35856f8b54548cae7bd3b8d6a4de69e1748283b2)) + +### This is a breaking change (for gradio_client only)! + +Previously, `Client.predict` would only return the first output of an endpoint that streamed results. This was causing confusion for developers that wanted to call these streaming demos via the client. + +We realize that developers using the client don't know the internals of whether a demo streams or not, so we're changing the behavior of predict to match developer expectations. + +Using `Client.predict` will now return the final output of a streaming endpoint. This will make it even easier to use gradio apps via the client. + + Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +### Features + +- [#5076](https://github.com/gradio-app/gradio/pull/5076) [`2745075a`](https://github.com/gradio-app/gradio/commit/2745075a26f80e0e16863d483401ff1b6c5ada7a) - Add deploy_discord to docs. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! 
+ +### Fixes + +- [#5061](https://github.com/gradio-app/gradio/pull/5061) [`136adc9c`](https://github.com/gradio-app/gradio/commit/136adc9ccb23e5cb4d02d2e88f23f0b850041f98) - Ensure `gradio_client` is backwards compatible with `gradio==3.24.1`. Thanks [@abidlabs](https://github.com/abidlabs)! + +## 0.3.0 + +### Highlights + +#### Create Discord Bots from Gradio Apps 🤖 ([#4960](https://github.com/gradio-app/gradio/pull/4960) [`46e4ef67`](https://github.com/gradio-app/gradio/commit/46e4ef67d287dd68a91473b73172b29cbad064bc)) + +We're excited to announce that Gradio can now automatically create a discord bot from any `gr.ChatInterface` app. + +It's as easy as importing `gradio_client`, connecting to the app, and calling `deploy_discord`! + +_🦙 Turning Llama 2 70b into a discord bot 🦙_ + +```python +import gradio_client as grc +grc.Client("ysharma/Explore_llamav2_with_TGI").deploy_discord(to_id="llama2-70b-discord-bot") +``` + + + +#### Getting started with template spaces + +To help get you started, we have created an organization on Hugging Face called [gradio-discord-bots](https://huggingface.co/gradio-discord-bots) with template spaces you can use to turn state of the art LLMs powered by Gradio to discord bots. + +Currently we have template spaces for: + +- [Llama-2-70b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/Llama-2-70b-chat-hf) powered by a FREE Hugging Face Inference Endpoint! +- [Llama-2-13b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/Llama-2-13b-chat-hf) powered by Hugging Face Inference Endpoints. +- [Llama-2-13b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/llama-2-13b-chat-transformers) powered by Hugging Face transformers. +- [falcon-7b-instruct](https://huggingface.co/spaces/gradio-discord-bots/falcon-7b-instruct) powered by Hugging Face Inference Endpoints. +- [gpt-3.5-turbo](https://huggingface.co/spaces/gradio-discord-bots/gpt-35-turbo), powered by openai. Requires an OpenAI key. 
+ +But once again, you can deploy ANY `gr.ChatInterface` app exposed on the internet! So don't hesitate to try it on your own Chatbots. + +❗️ Additional Note ❗️: Technically, any gradio app that exposes an api route that takes in a single string and outputs a single string can be deployed to discord. But `gr.ChatInterface` apps naturally lend themselves to discord's chat functionality so we suggest you start with those. + +Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +### New Features: + +- Endpoints that return layout components are now properly handled in the `submit` and `view_api` methods. Output layout components are not returned by the API but all other components are (excluding `gr.State`). By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4871](https://github.com/gradio-app/gradio/pull/4871) + +### Bug Fixes: + +No changes to highlight + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.9 + +### New Features: + +No changes to highlight + +### Bug Fixes: + +- Fix bug determining the api name when a demo has `api_name=False` by [@freddyboulton](https://github.com/freddyaboulton) in [PR 4886](https://github.com/gradio-app/gradio/pull/4886) + +### Breaking Changes: + +No changes to highlight. 
+ +### Full Changelog: + +- Pinned dependencies to major versions to reduce the likelihood of a broken `gradio_client` due to changes in downstream dependencies by [@abidlabs](https://github.com/abidlabs) in [PR 4885](https://github.com/gradio-app/gradio/pull/4885) + +# 0.2.8 + +### New Features: + +- Support loading gradio apps where `api_name=False` by [@abidlabs](https://github.com/abidlabs) in [PR 4683](https://github.com/gradio-app/gradio/pull/4683) + +### Bug Fixes: + +- Fix bug where space duplication would error if the demo has cpu-basic hardware by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4583](https://github.com/gradio-app/gradio/pull/4583) +- Fixes and optimizations to URL/download functions by [@akx](https://github.com/akx) in [PR 4695](https://github.com/gradio-app/gradio/pull/4695) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.7 + +### New Features: + +- The output directory for files downloaded via the Client can now be set by the `output_dir` parameter in `Client` by [@abidlabs](https://github.com/abidlabs) in [PR 4501](https://github.com/gradio-app/gradio/pull/4501) + +### Bug Fixes: + +- The output directory for files downloaded via the Client are now set to a temporary directory by default (instead of the working directory in some cases) by [@abidlabs](https://github.com/abidlabs) in [PR 4501](https://github.com/gradio-app/gradio/pull/4501) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.6 + +### New Features: + +No changes to highlight. 
+ +### Bug Fixes: + +- Fixed bug file deserialization didn't preserve all file extensions by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4440](https://github.com/gradio-app/gradio/pull/4440) +- Fixed bug where mounted apps could not be called via the client by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4435](https://github.com/gradio-app/gradio/pull/4435) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.5 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fixes parameter names not showing underscores by [@abidlabs](https://github.com/abidlabs) in [PR 4230](https://github.com/gradio-app/gradio/pull/4230) +- Fixes issue in which state was not handled correctly if `serialize=False` by [@abidlabs](https://github.com/abidlabs) in [PR 4230](https://github.com/gradio-app/gradio/pull/4230) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.4 + +### Bug Fixes: + +- Fixes missing serialization classes for several components: `Barplot`, `Lineplot`, `Scatterplot`, `AnnotatedImage`, `Interpretation` by [@abidlabs](https://github.com/abidlabs) in [PR 4167](https://github.com/gradio-app/gradio/pull/4167) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +# 0.2.3 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fix example inputs for `gr.File(file_count='multiple')` output components by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4153](https://github.com/gradio-app/gradio/pull/4153) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. 
+ +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +# 0.2.2 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Only send request to `/info` route if demo version is above `3.28.3` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4109](https://github.com/gradio-app/gradio/pull/4109) + +### Other Changes: + +- Fix bug in test from gradio 3.29.0 refactor by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4138](https://github.com/gradio-app/gradio/pull/4138) + +### Breaking Changes: + +No changes to highlight. + +# 0.2.1 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +Removes extraneous `State` component info from the `Client.view_api()` method by [@abidlabs](https://github.com/freddyaboulton) in [PR 4107](https://github.com/gradio-app/gradio/pull/4107) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +Separates flaky tests from non-flaky tests by [@abidlabs](https://github.com/freddyaboulton) in [PR 4107](https://github.com/gradio-app/gradio/pull/4107) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +# 0.1.4 + +### New Features: + +- Progress Updates from `gr.Progress()` can be accessed via `job.status().progress_data` by @freddyaboulton](https://github.com/freddyaboulton) in [PR 3924](https://github.com/gradio-app/gradio/pull/3924) + +### Bug Fixes: + +- Fixed bug where unnamed routes where displayed with `api_name` instead of `fn_index` in `view_api` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3972](https://github.com/gradio-app/gradio/pull/3972) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. 
+ +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +# 0.1.3 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fixed bug where `Video` components in latest gradio were not able to be deserialized by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3860](https://github.com/gradio-app/gradio/pull/3860) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +# 0.1.2 + +First public release of the Gradio Client library! The `gradio_client` Python library that makes it very easy to use any Gradio app as an API. + +As an example, consider this [Hugging Face Space that transcribes audio files](https://huggingface.co/spaces/abidlabs/whisper) that are recorded from the microphone. + +![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/whisper-screenshot.jpg) + +Using the `gradio_client` library, we can easily use the Gradio as an API to transcribe audio files programmatically. + +Here's the entire code to do it: + +```python +from gradio_client import Client + +client = Client("abidlabs/whisper") +client.predict("audio_sample.wav") + +>> "This is a test of the whisper speech recognition model." 
+``` + +Read more about how to use the `gradio_client` library here: https://gradio.app/getting-started-with-the-python-client/ \ No newline at end of file diff --git a/testbed/gradio-app__gradio/client/python/README.md b/testbed/gradio-app__gradio/client/python/README.md new file mode 100644 index 0000000000000000000000000000000000000000..52d7f34576587e2f7ece5e9daed93eb9f31ada72 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/README.md @@ -0,0 +1,143 @@ +# `gradio_client`: Use a Gradio app as an API -- in 3 lines of Python + +This directory contains the source code for `gradio_client`, a lightweight Python library that makes it very easy to use any Gradio app as an API. + +As an example, consider this [Hugging Face Space that transcribes audio files](https://huggingface.co/spaces/abidlabs/whisper) that are recorded from the microphone. + +![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/whisper-screenshot.jpg) + +Using the `gradio_client` library, we can easily use the Gradio app as an API to transcribe audio files programmatically. + +Here's the entire code to do it: + +```python +from gradio_client import Client + +client = Client("abidlabs/whisper") +client.predict("audio_sample.wav") + +>> "This is a test of the whisper speech recognition model." +``` + +The Gradio client works with any Gradio Space, whether it be an image generator, a stateful chatbot, or a tax calculator. + +## Installation + +If you already have a recent version of `gradio`, then the `gradio_client` is included as a dependency. + +Otherwise, the lightweight `gradio_client` package can be installed from pip (or pip3) and works with Python versions 3.8 or higher: + +```bash +$ pip install gradio_client +``` + +## Basic Usage + +### Connecting to a Space or a Gradio app + +Start by instantiating a `Client` object and connecting it to a Gradio app that is running on Spaces (or anywhere else)! 
+ +**Connecting to a Space** + +```python +from gradio_client import Client + +client = Client("abidlabs/en2fr") # a Space that translates from English to French +``` + +You can also connect to private Spaces by passing in your HF token with the `hf_token` parameter. You can get your HF token here: https://huggingface.co/settings/tokens + +```python +from gradio_client import Client + +client = Client("abidlabs/my-private-space", hf_token="...") +``` + +**Duplicating a Space for private use** + +While you can use any public Space as an API, you may get rate limited by Hugging Face if you make too many requests. For unlimited usage of a Space, simply duplicate the Space to create a private Space, +and then use it to make as many requests as you'd like! + +The `gradio_client` includes a class method: `Client.duplicate()` to make this process simple: + +```python +from gradio_client import Client + +client = Client.duplicate("abidlabs/whisper") +client.predict("audio_sample.wav") + +>> "This is a test of the whisper speech recognition model." +``` + +If you have previously duplicated a Space, re-running `duplicate()` will _not_ create a new Space. Instead, the Client will attach to the previously-created Space. So it is safe to re-run the `Client.duplicate()` method multiple times. + +**Note:** if the original Space uses GPUs, your private Space will as well, and your Hugging Face account will get billed based on the price of the GPU. To minimize charges, your Space will automatically go to sleep after 1 hour of inactivity. You can also set the hardware using the `hardware` parameter of `duplicate()`. + +**Connecting a general Gradio app** + +If your app is running somewhere else, just provide the full URL instead, including the "http://" or "https://". 
Here's an example of making predictions to a Gradio app that is running on a share URL: + +```python +from gradio_client import Client + +client = Client("https://bec81a83-5b5c-471e.gradio.live") +``` + +### Inspecting the API endpoints + +Once you have connected to a Gradio app, you can view the APIs that are available to you by calling the `.view_api()` method. For the Whisper Space, we see the following: + +``` +Client.predict() Usage Info +--------------------------- +Named API endpoints: 1 + + - predict(input_audio, api_name="/predict") -> value_0 + Parameters: + - [Audio] input_audio: str (filepath or URL) + Returns: + - [Textbox] value_0: str (value) +``` + +This shows us that we have 1 API endpoint in this space, and shows us how to use the API endpoint to make a prediction: we should call the `.predict()` method, providing a parameter `input_audio` of type `str`, which is a `filepath or URL`. + +We should also provide the `api_name='/predict'` argument. Although this isn't necessary if a Gradio app has a single named endpoint, it does allow us to call different endpoints in a single app if they are available. If an app has unnamed API endpoints, these can also be displayed by running `.view_api(all_endpoints=True)`. + +### Making a prediction + +The simplest way to make a prediction is simply to call the `.predict()` function with the appropriate arguments: + +```python +from gradio_client import Client + +client = Client("abidlabs/en2fr") +client.predict("Hello") + +>> Bonjour +``` + +If there are multiple parameters, then you should pass them as separate arguments to `.predict()`, like this: + +```python +from gradio_client import Client + +client = Client("gradio/calculator") +client.predict(4, "add", 5) + +>> 9.0 +``` + +For certain inputs, such as images, you should pass in the filepath or URL to the file. Likewise, for the corresponding output types, you will get a filepath or URL returned. 
+ +```python +from gradio_client import Client + +client = Client("abidlabs/whisper") +client.predict("https://audio-samples.github.io/samples/mp3/blizzard_unconditional/sample-0.mp3") + +>> "My thought I have nobody by a beauty and will as you poured. Mr. Rochester is serve in that so don't find simpus, and devoted abode, to at might in a r—" +``` + +## Advanced Usage + +For more ways to use the Gradio Python Client, check out our dedicated Guide on the Python client, available here: https://www.gradio.app/getting-started-with-the-python-client/ diff --git a/testbed/gradio-app__gradio/client/python/build_pypi.sh b/testbed/gradio-app__gradio/client/python/build_pypi.sh new file mode 100644 index 0000000000000000000000000000000000000000..00068e11da1efa3e7359f4c0ad4fca6b4462371a --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/build_pypi.sh @@ -0,0 +1,9 @@ +#!/bin/bash +set -e + +cd "$(dirname ${0})" + +python3 -m pip install build +rm -rf dist/* +rm -rf build/* +python3 -m build diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/CHANGELOG.md b/testbed/gradio-app__gradio/client/python/gradio_client/CHANGELOG.md new file mode 100644 index 0000000000000000000000000000000000000000..6fd91a865825cc16658d165fbdb0c9b9026b1c4e --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/CHANGELOG.md @@ -0,0 +1,402 @@ +# gradio_client + +## 0.5.2 + +### Features + +- [#5653](https://github.com/gradio-app/gradio/pull/5653) [`ea0e00b20`](https://github.com/gradio-app/gradio/commit/ea0e00b207b4b90a10e9d054c4202d4e705a29ba) - Prevent Clients from accessing API endpoints that set `api_name=False`. Thanks [@abidlabs](https://github.com/abidlabs)! + +## 0.5.1 + +### Features + +- [#5514](https://github.com/gradio-app/gradio/pull/5514) [`52f783175`](https://github.com/gradio-app/gradio/commit/52f7831751b432411e109bd41add4ab286023a8e) - refactor: Use package.json for version management. Thanks [@DarhkVoyd](https://github.com/DarhkVoyd)! 
+ +## 0.5.0 + +### Highlights + +#### Enable streaming audio in python client ([#5248](https://github.com/gradio-app/gradio/pull/5248) [`390624d8`](https://github.com/gradio-app/gradio/commit/390624d8ad2b1308a5bf8384435fd0db98d8e29e)) + +The `gradio_client` now supports streaming file outputs 🌊 + +No new syntax! Connect to a gradio demo that supports streaming file outputs and call `predict` or `submit` as you normally would. + +```python +import gradio_client as grc +client = grc.Client("gradio/stream_audio_out") + +# Get the entire generated audio as a local file +client.predict("/Users/freddy/Pictures/bark_demo.mp4", api_name="/predict") + +job = client.submit("/Users/freddy/Pictures/bark_demo.mp4", api_name="/predict") + +# Get the entire generated audio as a local file +job.result() + +# Each individual chunk +job.outputs() +``` + + Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +### Fixes + +- [#5295](https://github.com/gradio-app/gradio/pull/5295) [`7b8fa8aa`](https://github.com/gradio-app/gradio/commit/7b8fa8aa58f95f5046b9add64b40368bd3f1b700) - Allow caching examples with streamed output. Thanks [@aliabid94](https://github.com/aliabid94)! + +## 0.4.0 + +### Highlights + +#### Client.predict will now return the final output for streaming endpoints ([#5057](https://github.com/gradio-app/gradio/pull/5057) [`35856f8b`](https://github.com/gradio-app/gradio/commit/35856f8b54548cae7bd3b8d6a4de69e1748283b2)) + +### This is a breaking change (for gradio_client only)! + +Previously, `Client.predict` would only return the first output of an endpoint that streamed results. This was causing confusion for developers that wanted to call these streaming demos via the client. + +We realize that developers using the client don't know the internals of whether a demo streams or not, so we're changing the behavior of predict to match developer expectations. + +Using `Client.predict` will now return the final output of a streaming endpoint. 
This will make it even easier to use gradio apps via the client. + + Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +### Features + +- [#5076](https://github.com/gradio-app/gradio/pull/5076) [`2745075a`](https://github.com/gradio-app/gradio/commit/2745075a26f80e0e16863d483401ff1b6c5ada7a) - Add deploy_discord to docs. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +### Fixes + +- [#5061](https://github.com/gradio-app/gradio/pull/5061) [`136adc9c`](https://github.com/gradio-app/gradio/commit/136adc9ccb23e5cb4d02d2e88f23f0b850041f98) - Ensure `gradio_client` is backwards compatible with `gradio==3.24.1`. Thanks [@abidlabs](https://github.com/abidlabs)! + +## 0.3.0 + +### Highlights + +#### Create Discord Bots from Gradio Apps 🤖 ([#4960](https://github.com/gradio-app/gradio/pull/4960) [`46e4ef67`](https://github.com/gradio-app/gradio/commit/46e4ef67d287dd68a91473b73172b29cbad064bc)) + +We're excited to announce that Gradio can now automatically create a discord bot from any `gr.ChatInterface` app. + +It's as easy as importing `gradio_client`, connecting to the app, and calling `deploy_discord`! + +_🦙 Turning Llama 2 70b into a discord bot 🦙_ + +```python +import gradio_client as grc +grc.Client("ysharma/Explore_llamav2_with_TGI").deploy_discord(to_id="llama2-70b-discord-bot") +``` + + + +#### Getting started with template spaces + +To help get you started, we have created an organization on Hugging Face called [gradio-discord-bots](https://huggingface.co/gradio-discord-bots) with template spaces you can use to turn state of the art LLMs powered by Gradio to discord bots. + +Currently we have template spaces for: + +- [Llama-2-70b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/Llama-2-70b-chat-hf) powered by a FREE Hugging Face Inference Endpoint! +- [Llama-2-13b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/Llama-2-13b-chat-hf) powered by Hugging Face Inference Endpoints. 
+- [Llama-2-13b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/llama-2-13b-chat-transformers) powered by Hugging Face transformers. +- [falcon-7b-instruct](https://huggingface.co/spaces/gradio-discord-bots/falcon-7b-instruct) powered by Hugging Face Inference Endpoints. +- [gpt-3.5-turbo](https://huggingface.co/spaces/gradio-discord-bots/gpt-35-turbo), powered by openai. Requires an OpenAI key. + +But once again, you can deploy ANY `gr.ChatInterface` app exposed on the internet! So don't hesitate to try it on your own Chatbots. + +❗️ Additional Note ❗️: Technically, any gradio app that exposes an api route that takes in a single string and outputs a single string can be deployed to discord. But `gr.ChatInterface` apps naturally lend themselves to discord's chat functionality so we suggest you start with those. + +Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +### New Features: + +- Endpoints that return layout components are now properly handled in the `submit` and `view_api` methods. Output layout components are not returned by the API but all other components are (excluding `gr.State`). By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4871](https://github.com/gradio-app/gradio/pull/4871) + +### Bug Fixes: + +No changes to highlight + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.9 + +### New Features: + +No changes to highlight + +### Bug Fixes: + +- Fix bug determining the api name when a demo has `api_name=False` by [@freddyboulton](https://github.com/freddyaboulton) in [PR 4886](https://github.com/gradio-app/gradio/pull/4886) + +### Breaking Changes: + +No changes to highlight. 
+ +### Full Changelog: + +- Pinned dependencies to major versions to reduce the likelihood of a broken `gradio_client` due to changes in downstream dependencies by [@abidlabs](https://github.com/abidlabs) in [PR 4885](https://github.com/gradio-app/gradio/pull/4885) + +# 0.2.8 + +### New Features: + +- Support loading gradio apps where `api_name=False` by [@abidlabs](https://github.com/abidlabs) in [PR 4683](https://github.com/gradio-app/gradio/pull/4683) + +### Bug Fixes: + +- Fix bug where space duplication would error if the demo has cpu-basic hardware by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4583](https://github.com/gradio-app/gradio/pull/4583) +- Fixes and optimizations to URL/download functions by [@akx](https://github.com/akx) in [PR 4695](https://github.com/gradio-app/gradio/pull/4695) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.7 + +### New Features: + +- The output directory for files downloaded via the Client can now be set by the `output_dir` parameter in `Client` by [@abidlabs](https://github.com/abidlabs) in [PR 4501](https://github.com/gradio-app/gradio/pull/4501) + +### Bug Fixes: + +- The output directory for files downloaded via the Client are now set to a temporary directory by default (instead of the working directory in some cases) by [@abidlabs](https://github.com/abidlabs) in [PR 4501](https://github.com/gradio-app/gradio/pull/4501) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.6 + +### New Features: + +No changes to highlight. 
+ +### Bug Fixes: + +- Fixed bug file deserialization didn't preserve all file extensions by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4440](https://github.com/gradio-app/gradio/pull/4440) +- Fixed bug where mounted apps could not be called via the client by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4435](https://github.com/gradio-app/gradio/pull/4435) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.5 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fixes parameter names not showing underscores by [@abidlabs](https://github.com/abidlabs) in [PR 4230](https://github.com/gradio-app/gradio/pull/4230) +- Fixes issue in which state was not handled correctly if `serialize=False` by [@abidlabs](https://github.com/abidlabs) in [PR 4230](https://github.com/gradio-app/gradio/pull/4230) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +# 0.2.4 + +### Bug Fixes: + +- Fixes missing serialization classes for several components: `Barplot`, `Lineplot`, `Scatterplot`, `AnnotatedImage`, `Interpretation` by [@abidlabs](https://github.com/abidlabs) in [PR 4167](https://github.com/gradio-app/gradio/pull/4167) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +# 0.2.3 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fix example inputs for `gr.File(file_count='multiple')` output components by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4153](https://github.com/gradio-app/gradio/pull/4153) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. 
+
+### Breaking Changes:
+
+No changes to highlight.
+
+### Full Changelog:
+
+No changes to highlight.
+
+### Contributors Shoutout:
+
+No changes to highlight.
+
+# 0.2.2
+
+### New Features:
+
+No changes to highlight.
+
+### Bug Fixes:
+
+- Only send request to `/info` route if demo version is above `3.28.3` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4109](https://github.com/gradio-app/gradio/pull/4109)
+
+### Other Changes:
+
+- Fix bug in test from gradio 3.29.0 refactor by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4138](https://github.com/gradio-app/gradio/pull/4138)
+
+### Breaking Changes:
+
+No changes to highlight.
+
+# 0.2.1
+
+### New Features:
+
+No changes to highlight.
+
+### Bug Fixes:
+
+Removes extraneous `State` component info from the `Client.view_api()` method by [@abidlabs](https://github.com/abidlabs) in [PR 4107](https://github.com/gradio-app/gradio/pull/4107)
+
+### Documentation Changes:
+
+No changes to highlight.
+
+### Testing and Infrastructure Changes:
+
+Separates flaky tests from non-flaky tests by [@abidlabs](https://github.com/abidlabs) in [PR 4107](https://github.com/gradio-app/gradio/pull/4107)
+
+### Breaking Changes:
+
+No changes to highlight.
+
+### Full Changelog:
+
+No changes to highlight.
+
+### Contributors Shoutout:
+
+No changes to highlight.
+
+# 0.1.4
+
+### New Features:
+
+- Progress Updates from `gr.Progress()` can be accessed via `job.status().progress_data` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3924](https://github.com/gradio-app/gradio/pull/3924)
+
+### Bug Fixes:
+
+- Fixed bug where unnamed routes were displayed with `api_name` instead of `fn_index` in `view_api` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3972](https://github.com/gradio-app/gradio/pull/3972)
+
+### Documentation Changes:
+
+No changes to highlight.
+
+### Testing and Infrastructure Changes:
+
+No changes to highlight.
+ +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +# 0.1.3 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fixed bug where `Video` components in latest gradio were not able to be deserialized by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3860](https://github.com/gradio-app/gradio/pull/3860) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +# 0.1.2 + +First public release of the Gradio Client library! The `gradio_client` Python library that makes it very easy to use any Gradio app as an API. + +As an example, consider this [Hugging Face Space that transcribes audio files](https://huggingface.co/spaces/abidlabs/whisper) that are recorded from the microphone. + +![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/whisper-screenshot.jpg) + +Using the `gradio_client` library, we can easily use the Gradio as an API to transcribe audio files programmatically. + +Here's the entire code to do it: + +```python +from gradio_client import Client + +client = Client("abidlabs/whisper") +client.predict("audio_sample.wav") + +>> "This is a test of the whisper speech recognition model." 
+``` + +Read more about how to use the `gradio_client` library here: https://gradio.app/getting-started-with-the-python-client/ \ No newline at end of file diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/__init__.py b/testbed/gradio-app__gradio/client/python/gradio_client/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..01d6b290be59acf63e1309e508927ddf060cfc40 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/__init__.py @@ -0,0 +1,7 @@ +from gradio_client.client import Client +from gradio_client.utils import __version__ + +__all__ = [ + "Client", + "__version__", +] diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/cli/__init__.py b/testbed/gradio-app__gradio/client/python/gradio_client/cli/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0c796253489147f941b78b5bb04a82935a72edab --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/cli/__init__.py @@ -0,0 +1,3 @@ +from gradio_client.cli import deploy_discord + +__all__ = ["deploy_discord"] diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/cli/deploy_discord.py b/testbed/gradio-app__gradio/client/python/gradio_client/cli/deploy_discord.py new file mode 100644 index 0000000000000000000000000000000000000000..bb26c088abeef28313142db9d58213a38d5581b4 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/cli/deploy_discord.py @@ -0,0 +1,58 @@ +import argparse + +from gradio_client import Client + + +def main(): + parser = argparse.ArgumentParser(description="Deploy Space as Discord Bot.") + parser.add_argument("deploy-discord") + parser.add_argument( + "--src", + type=str, + help="The space id or url or gradio app you want to deploy as a gradio bot.", + ) + parser.add_argument( + "--discord-bot-token", + type=str, + help="Discord bot token. 
Get one on the discord website.", + ) + parser.add_argument( + "--api-names", + nargs="*", + help="Api names to turn into discord bots", + default=[], + ) + parser.add_argument( + "--to-id", + type=str, + help="Name of the space used to host the discord bot", + default=None, + ) + parser.add_argument( + "--hf-token", + type=str, + help=( + "Hugging Face token. Can be ommitted if you are logged in via huggingface_hub cli. " + "Must be provided if upstream space is private." + ), + default=None, + ) + parser.add_argument( + "--private", + type=bool, + nargs="?", + help="Whether the discord bot space is private.", + const=True, + default=False, + ) + args = parser.parse_args() + for i, name in enumerate(args.api_names): + if "," in name: + args.api_names[i] = tuple(name.split(",")) + Client(args.src).deploy_discord( + discord_bot_token=args.discord_bot_token, + api_names=args.api_names, + to_id=args.to_id, + hf_token=args.hf_token, + private=args.private, + ) diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/client.py b/testbed/gradio-app__gradio/client/python/gradio_client/client.py new file mode 100644 index 0000000000000000000000000000000000000000..bccb303fb42caef1e60cd5a5a2c01d9d63595192 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/client.py @@ -0,0 +1,1251 @@ +"""The main Client class for the Python client.""" +from __future__ import annotations + +import concurrent.futures +import json +import os +import re +import secrets +import tempfile +import threading +import time +import urllib.parse +import uuid +import warnings +from concurrent.futures import Future +from datetime import datetime +from pathlib import Path +from threading import Lock +from typing import Any, Callable, Literal + +import huggingface_hub +import requests +import websockets +from huggingface_hub import CommitOperationAdd, SpaceHardware, SpaceStage +from huggingface_hub.utils import ( + RepositoryNotFoundError, + build_hf_headers, + 
send_telemetry, +) +from packaging import version + +from gradio_client import serializing, utils +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import Serializable +from gradio_client.utils import ( + Communicator, + JobStatus, + Status, + StatusUpdate, +) + +set_documentation_group("py-client") + + +DEFAULT_TEMP_DIR = os.environ.get("GRADIO_TEMP_DIR") or str( + Path(tempfile.gettempdir()) / "gradio" +) + + +@document("predict", "submit", "view_api", "duplicate", "deploy_discord") +class Client: + """ + The main Client class for the Python client. This class is used to connect to a remote Gradio app and call its API endpoints. + + Example: + from gradio_client import Client + + client = Client("abidlabs/whisper-large-v2") # connecting to a Hugging Face Space + client.predict("test.mp4", api_name="/predict") + >> What a nice recording! # returns the result of the remote API call + + client = Client("https://bec81a83-5b5c-471e.gradio.live") # connecting to a temporary Gradio share URL + job = client.submit("hello", api_name="/predict") # runs the prediction in a background thread + job.result() + >> 49 # returns the result of the remote API call (blocking call) + """ + + def __init__( + self, + src: str, + hf_token: str | None = None, + max_workers: int = 40, + serialize: bool = True, + output_dir: str | Path | None = DEFAULT_TEMP_DIR, + verbose: bool = True, + ): + """ + Parameters: + src: Either the name of the Hugging Face Space to load, (e.g. "abidlabs/whisper-large-v2") or the full URL (including "http" or "https") of the hosted Gradio app to load (e.g. "http://mydomain.com/app" or "https://bec81a83-5b5c-471e.gradio.live/"). + hf_token: The Hugging Face token to use to access private Spaces. Automatically fetched if you are logged in via the Hugging Face Hub CLI. 
Obtain from: https://huggingface.co/settings/token + max_workers: The maximum number of thread workers that can be used to make requests to the remote Gradio app simultaneously. + serialize: Whether the client should serialize the inputs and deserialize the outputs of the remote API. If set to False, the client will pass the inputs and outputs as-is, without serializing/deserializing them. E.g. you if you set this to False, you'd submit an image in base64 format instead of a filepath, and you'd get back an image in base64 format from the remote API instead of a filepath. + output_dir: The directory to save files that are downloaded from the remote API. If None, reads from the GRADIO_TEMP_DIR environment variable. Defaults to a temporary directory on your machine. + verbose: Whether the client should print statements to the console. + """ + self.verbose = verbose + self.hf_token = hf_token + self.serialize = serialize + self.headers = build_hf_headers( + token=hf_token, + library_name="gradio_client", + library_version=utils.__version__, + ) + self.space_id = None + self.output_dir = output_dir + + if src.startswith("http://") or src.startswith("https://"): + _src = src if src.endswith("/") else src + "/" + else: + _src = self._space_name_to_src(src) + if _src is None: + raise ValueError( + f"Could not find Space: {src}. If it is a private Space, please provide an hf_token." + ) + self.space_id = src + self.src = _src + state = self._get_space_state() + if state == SpaceStage.BUILDING: + if self.verbose: + print("Space is still building. Please wait...") + while self._get_space_state() == SpaceStage.BUILDING: + time.sleep(2) # so we don't get rate limited by the API + pass + if state in utils.INVALID_RUNTIME: + raise ValueError( + f"The current space is in the invalid state: {state}. " + "Please contact the owner to fix this." 
+ ) + if self.verbose: + print(f"Loaded as API: {self.src} ✔") + + self.api_url = urllib.parse.urljoin(self.src, utils.API_URL) + self.ws_url = urllib.parse.urljoin( + self.src.replace("http", "ws", 1), utils.WS_URL + ) + self.upload_url = urllib.parse.urljoin(self.src, utils.UPLOAD_URL) + self.reset_url = urllib.parse.urljoin(self.src, utils.RESET_URL) + self.config = self._get_config() + self.session_hash = str(uuid.uuid4()) + + self.endpoints = [ + Endpoint(self, fn_index, dependency) + for fn_index, dependency in enumerate(self.config["dependencies"]) + ] + + # Create a pool of threads to handle the requests + self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) + + # Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1 + threading.Thread(target=self._telemetry_thread).start() + + @classmethod + def duplicate( + cls, + from_id: str, + to_id: str | None = None, + hf_token: str | None = None, + private: bool = True, + hardware: Literal[ + "cpu-basic", + "cpu-upgrade", + "t4-small", + "t4-medium", + "a10g-small", + "a10g-large", + "a100-large", + ] + | SpaceHardware + | None = None, + secrets: dict[str, str] | None = None, + sleep_timeout: int = 5, + max_workers: int = 40, + verbose: bool = True, + ): + """ + Duplicates a Hugging Face Space under your account and returns a Client object + for the new Space. No duplication is created if the Space already exists in your + account (to override this, provide a new name for the new Space using `to_id`). + To use this method, you must provide an `hf_token` or be logged in via the Hugging + Face Hub CLI. + + The new Space will be private by default and use the same hardware as the original + Space. This can be changed by using the `private` and `hardware` parameters. 
For
+        hardware upgrades (beyond the basic CPU tier), you may be required to provide
+        billing information on Hugging Face: https://huggingface.co/settings/billing
+
+        Parameters:
+            from_id: The name of the Hugging Face Space to duplicate in the format "{username}/{space_id}", e.g. "gradio/whisper".
+            to_id: The name of the new Hugging Face Space to create, e.g. "abidlabs/whisper-duplicate". If not provided, the new Space will be named "{your_HF_username}/{space_id}".
+            hf_token: The Hugging Face token to use to access private Spaces. Automatically fetched if you are logged in via the Hugging Face Hub CLI. Obtain from: https://huggingface.co/settings/token
+            private: Whether the new Space should be private (True) or public (False). Defaults to True.
+            hardware: The hardware tier to use for the new Space. Defaults to the same hardware tier as the original Space. Options include "cpu-basic", "cpu-upgrade", "t4-small", "t4-medium", "a10g-small", "a10g-large", "a100-large", subject to availability.
+            secrets: A dictionary of (secret key, secret value) to pass to the new Space. Defaults to None. Secrets are only used when the Space is duplicated for the first time, and are not updated if the duplicated Space already exists.
+            sleep_timeout: The number of minutes after which the duplicate Space will be paused if no requests are made to it (to minimize billing charges). Defaults to 5 minutes.
+            max_workers: The maximum number of thread workers that can be used to make requests to the remote Gradio app simultaneously.
+            verbose: Whether the client should print statements to the console.
+        Example:
+            import os
+            from gradio_client import Client
+            HF_TOKEN = os.environ.get("HF_TOKEN")
+            client = Client.duplicate("abidlabs/whisper", hf_token=HF_TOKEN)
+            client.predict("audio_sample.wav")
+            >> "This is a test of the whisper speech recognition model."
+ """ + try: + original_info = huggingface_hub.get_space_runtime(from_id, token=hf_token) + except RepositoryNotFoundError as rnfe: + raise ValueError( + f"Could not find Space: {from_id}. If it is a private Space, please provide an `hf_token`." + ) from rnfe + if to_id: + if "/" in to_id: + to_id = to_id.split("/")[1] + space_id = huggingface_hub.get_full_repo_name(to_id, token=hf_token) + else: + space_id = huggingface_hub.get_full_repo_name( + from_id.split("/")[1], token=hf_token + ) + try: + huggingface_hub.get_space_runtime(space_id, token=hf_token) + if verbose: + print( + f"Using your existing Space: {utils.SPACE_URL.format(space_id)} 🤗" + ) + if secrets is not None: + warnings.warn( + "Secrets are only used when the Space is duplicated for the first time, and are not updated if the duplicated Space already exists." + ) + except RepositoryNotFoundError: + if verbose: + print(f"Creating a duplicate of {from_id} for your own use... 🤗") + huggingface_hub.duplicate_space( + from_id=from_id, + to_id=space_id, + token=hf_token, + exist_ok=True, + private=private, + ) + if secrets is not None: + for key, value in secrets.items(): + huggingface_hub.add_space_secret( + space_id, key, value, token=hf_token + ) + if verbose: + print(f"Created new Space: {utils.SPACE_URL.format(space_id)}") + current_info = huggingface_hub.get_space_runtime(space_id, token=hf_token) + current_hardware = ( + current_info.hardware or huggingface_hub.SpaceHardware.CPU_BASIC + ) + hardware = hardware or original_info.hardware + if current_hardware != hardware: + huggingface_hub.request_space_hardware(space_id, hardware) # type: ignore + print( + f"-------\nNOTE: this Space uses upgraded hardware: {hardware}... 
see billing info at https://huggingface.co/settings/billing\n-------" + ) + # Setting a timeout only works if the hardware is not basic + # so set it here after the hardware has been requested + if hardware != huggingface_hub.SpaceHardware.CPU_BASIC: + utils.set_space_timeout( + space_id, hf_token=hf_token, timeout_in_seconds=sleep_timeout * 60 + ) + if verbose: + print("") + client = cls( + space_id, hf_token=hf_token, max_workers=max_workers, verbose=verbose + ) + return client + + def _get_space_state(self): + if not self.space_id: + return None + info = huggingface_hub.get_space_runtime(self.space_id, token=self.hf_token) + return info.stage + + def predict( + self, + *args, + api_name: str | None = None, + fn_index: int | None = None, + ) -> Any: + """ + Calls the Gradio API and returns the result (this is a blocking call). + + Parameters: + args: The arguments to pass to the remote API. The order of the arguments must match the order of the inputs in the Gradio app. + api_name: The name of the API endpoint to call starting with a leading slash, e.g. "/predict". Does not need to be provided if the Gradio app has only one named API endpoint. + fn_index: As an alternative to api_name, this parameter takes the index of the API endpoint to call, e.g. 0. Both api_name and fn_index can be provided, but if they conflict, api_name will take precedence. + Returns: + The result of the API call. Will be a Tuple if the API has multiple outputs. + Example: + from gradio_client import Client + client = Client(src="gradio/calculator") + client.predict(5, "add", 4, api_name="/predict") + >> 9.0 + """ + inferred_fn_index = self._infer_fn_index(api_name, fn_index) + if self.endpoints[inferred_fn_index].is_continuous: + raise ValueError( + "Cannot call predict on this function as it may run forever. Use submit instead." 
+ ) + return self.submit(*args, api_name=api_name, fn_index=fn_index).result() + + def submit( + self, + *args, + api_name: str | None = None, + fn_index: int | None = None, + result_callbacks: Callable | list[Callable] | None = None, + ) -> Job: + """ + Creates and returns a Job object which calls the Gradio API in a background thread. The job can be used to retrieve the status and result of the remote API call. + + Parameters: + args: The arguments to pass to the remote API. The order of the arguments must match the order of the inputs in the Gradio app. + api_name: The name of the API endpoint to call starting with a leading slash, e.g. "/predict". Does not need to be provided if the Gradio app has only one named API endpoint. + fn_index: As an alternative to api_name, this parameter takes the index of the API endpoint to call, e.g. 0. Both api_name and fn_index can be provided, but if they conflict, api_name will take precedence. + result_callbacks: A callback function, or list of callback functions, to be called when the result is ready. If a list of functions is provided, they will be called in order. The return values from the remote API are provided as separate parameters into the callback. If None, no callback will be called. + Returns: + A Job object that can be used to retrieve the status and result of the remote API call. 
+ Example: + from gradio_client import Client + client = Client(src="gradio/calculator") + job = client.submit(5, "add", 4, api_name="/predict") + job.status() + >> + job.result() # blocking call + >> 9.0 + """ + inferred_fn_index = self._infer_fn_index(api_name, fn_index) + + helper = None + if self.endpoints[inferred_fn_index].use_ws: + helper = Communicator( + Lock(), + JobStatus(), + self.endpoints[inferred_fn_index].process_predictions, + self.reset_url, + ) + end_to_end_fn = self.endpoints[inferred_fn_index].make_end_to_end_fn(helper) + future = self.executor.submit(end_to_end_fn, *args) + + job = Job( + future, communicator=helper, verbose=self.verbose, space_id=self.space_id + ) + + if result_callbacks: + if isinstance(result_callbacks, Callable): + result_callbacks = [result_callbacks] + + def create_fn(callback) -> Callable: + def fn(future): + if isinstance(future.result(), tuple): + callback(*future.result()) + else: + callback(future.result()) + + return fn + + for callback in result_callbacks: + job.add_done_callback(create_fn(callback)) + + return job + + def view_api( + self, + all_endpoints: bool | None = None, + print_info: bool = True, + return_format: Literal["dict", "str"] | None = None, + ) -> dict | str | None: + """ + Prints the usage info for the API. If the Gradio app has multiple API endpoints, the usage info for each endpoint will be printed separately. If return_format="dict" the info is returned in dictionary format, as shown in the example below. + + Parameters: + all_endpoints: If True, prints information for both named and unnamed endpoints in the Gradio app. If False, will only print info about named endpoints. If None (default), will print info about named endpoints, unless there aren't any -- in which it will print info about unnamed endpoints. + print_info: If True, prints the usage info to the console. If False, does not print the usage info. + return_format: If None, nothing is returned. 
If "str", returns the same string that would be printed to the console. If "dict", returns the usage info as a dictionary that can be programmatically parsed, and *all endpoints are returned in the dictionary* regardless of the value of `all_endpoints`. The format of the dictionary is in the docstring of this method. + Example: + from gradio_client import Client + client = Client(src="gradio/calculator") + client.view_api(return_format="dict") + >> { + 'named_endpoints': { + '/predict': { + 'parameters': [ + { + 'label': 'num1', + 'type_python': 'int | float', + 'type_description': 'numeric value', + 'component': 'Number', + 'example_input': '5' + }, + { + 'label': 'operation', + 'type_python': 'str', + 'type_description': 'string value', + 'component': 'Radio', + 'example_input': 'add' + }, + { + 'label': 'num2', + 'type_python': 'int | float', + 'type_description': 'numeric value', + 'component': 'Number', + 'example_input': '5' + }, + ], + 'returns': [ + { + 'label': 'output', + 'type_python': 'int | float', + 'type_description': 'numeric value', + 'component': 'Number', + }, + ] + }, + '/flag': { + 'parameters': [ + ... + ], + 'returns': [ + ... + ] + } + } + 'unnamed_endpoints': { + 2: { + 'parameters': [ + ... + ], + 'returns': [ + ... 
+ ] + } + } + } + } + + """ + if self.serialize: + api_info_url = urllib.parse.urljoin(self.src, utils.API_INFO_URL) + else: + api_info_url = urllib.parse.urljoin(self.src, utils.RAW_API_INFO_URL) + + # Versions of Gradio older than 3.29.0 returned format of the API info + # from the /info endpoint + if version.parse(self.config.get("version", "2.0")) > version.Version("3.36.1"): + r = requests.get(api_info_url, headers=self.headers) + if r.ok: + info = r.json() + else: + raise ValueError(f"Could not fetch api info for {self.src}") + else: + fetch = requests.post( + utils.SPACE_FETCHER_URL, + json={"config": json.dumps(self.config), "serialize": self.serialize}, + ) + if fetch.ok: + info = fetch.json()["api"] + else: + raise ValueError(f"Could not fetch api info for {self.src}") + num_named_endpoints = len(info["named_endpoints"]) + num_unnamed_endpoints = len(info["unnamed_endpoints"]) + if num_named_endpoints == 0 and all_endpoints is None: + all_endpoints = True + + human_info = "Client.predict() Usage Info\n---------------------------\n" + human_info += f"Named API endpoints: {num_named_endpoints}\n" + + for api_name, endpoint_info in info["named_endpoints"].items(): + human_info += self._render_endpoints_info(api_name, endpoint_info) + + if all_endpoints: + human_info += f"\nUnnamed API endpoints: {num_unnamed_endpoints}\n" + for fn_index, endpoint_info in info["unnamed_endpoints"].items(): + # When loading from json, the fn_indices are read as strings + # because json keys can only be strings + human_info += self._render_endpoints_info(int(fn_index), endpoint_info) + else: + if num_unnamed_endpoints > 0: + human_info += f"\nUnnamed API endpoints: {num_unnamed_endpoints}, to view, run Client.view_api(all_endpoints=True)\n" + + if print_info: + print(human_info) + if return_format == "str": + return human_info + elif return_format == "dict": + return info + + def reset_session(self) -> None: + self.session_hash = str(uuid.uuid4()) + + def 
_render_endpoints_info( + self, + name_or_index: str | int, + endpoints_info: dict[str, list[dict[str, Any]]], + ) -> str: + parameter_names = [p["label"] for p in endpoints_info["parameters"]] + parameter_names = [utils.sanitize_parameter_names(p) for p in parameter_names] + rendered_parameters = ", ".join(parameter_names) + if rendered_parameters: + rendered_parameters = rendered_parameters + ", " + return_values = [p["label"] for p in endpoints_info["returns"]] + return_values = [utils.sanitize_parameter_names(r) for r in return_values] + rendered_return_values = ", ".join(return_values) + if len(return_values) > 1: + rendered_return_values = f"({rendered_return_values})" + + if isinstance(name_or_index, str): + final_param = f'api_name="{name_or_index}"' + elif isinstance(name_or_index, int): + final_param = f"fn_index={name_or_index}" + else: + raise ValueError("name_or_index must be a string or integer") + + human_info = f"\n - predict({rendered_parameters}{final_param}) -> {rendered_return_values}\n" + human_info += " Parameters:\n" + if endpoints_info["parameters"]: + for info in endpoints_info["parameters"]: + desc = ( + f" ({info['python_type']['description']})" + if info["python_type"].get("description") + else "" + ) + type_ = info["python_type"]["type"] + human_info += f" - [{info['component']}] {utils.sanitize_parameter_names(info['label'])}: {type_}{desc} \n" + else: + human_info += " - None\n" + human_info += " Returns:\n" + if endpoints_info["returns"]: + for info in endpoints_info["returns"]: + desc = ( + f" ({info['python_type']['description']})" + if info["python_type"].get("description") + else "" + ) + type_ = info["python_type"]["type"] + human_info += f" - [{info['component']}] {utils.sanitize_parameter_names(info['label'])}: {type_}{desc} \n" + else: + human_info += " - None\n" + + return human_info + + def __repr__(self): + return self.view_api(print_info=False, return_format="str") + + def __str__(self): + return 
self.view_api(print_info=False, return_format="str") + + def _telemetry_thread(self) -> None: + # Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1 + data = { + "src": self.src, + } + try: + send_telemetry( + topic="py_client/initiated", + library_name="gradio_client", + library_version=utils.__version__, + user_agent=data, + ) + except Exception: + pass + + def _infer_fn_index(self, api_name: str | None, fn_index: int | None) -> int: + inferred_fn_index = None + if api_name is not None: + for i, d in enumerate(self.config["dependencies"]): + config_api_name = d.get("api_name") + if config_api_name is None or config_api_name is False: + continue + if "/" + config_api_name == api_name: + inferred_fn_index = i + break + else: + error_message = f"Cannot find a function with `api_name`: {api_name}." + if not api_name.startswith("/"): + error_message += " Did you mean to use a leading slash?" + raise ValueError(error_message) + elif fn_index is not None: + inferred_fn_index = fn_index + if ( + inferred_fn_index >= len(self.endpoints) + or not self.endpoints[inferred_fn_index].is_valid + ): + raise ValueError(f"Invalid function index: {fn_index}.") + else: + valid_endpoints = [ + e for e in self.endpoints if e.is_valid and e.api_name is not None + ] + if len(valid_endpoints) == 1: + inferred_fn_index = valid_endpoints[0].fn_index + else: + raise ValueError( + "This Gradio app might have multiple endpoints. 
Please specify an `api_name` or `fn_index`" + ) + return inferred_fn_index + + def __del__(self): + if hasattr(self, "executor"): + self.executor.shutdown(wait=True) + + def _space_name_to_src(self, space) -> str | None: + return huggingface_hub.space_info(space, token=self.hf_token).host # type: ignore + + def _get_config(self) -> dict: + r = requests.get( + urllib.parse.urljoin(self.src, utils.CONFIG_URL), headers=self.headers + ) + if r.ok: + return r.json() + else: # to support older versions of Gradio + r = requests.get(self.src, headers=self.headers) + # some basic regex to extract the config + result = re.search(r"window.gradio_config = (.*?);[\s]*", r.text) + try: + config = json.loads(result.group(1)) # type: ignore + except AttributeError as ae: + raise ValueError( + f"Could not get Gradio config from: {self.src}" + ) from ae + if "allow_flagging" in config: + raise ValueError( + "Gradio 2.x is not supported by this client. Please upgrade your Gradio app to Gradio 3.x or higher." + ) + return config + + def deploy_discord( + self, + discord_bot_token: str | None = None, + api_names: list[str | tuple[str, str]] | None = None, + to_id: str | None = None, + hf_token: str | None = None, + private: bool = False, + ): + """ + Deploy the upstream app as a discord bot. Currently only supports gr.ChatInterface. + Parameters: + discord_bot_token: This is the "password" needed to be able to launch the bot. Users can get a token by creating a bot app on the discord website. If run the method without specifying a token, the space will explain how to get one. See here: https://huggingface.co/spaces/freddyaboulton/test-discord-bot-v1. + api_names: The api_names of the app to turn into bot commands. This parameter currently has no effect as ChatInterface only has one api_name ('/chat'). + to_id: The name of the space hosting the discord bot. 
If None, the name will be gradio-discord-bot-{random-substring} + hf_token: HF api token with write priviledges in order to upload the files to HF space. Can be ommitted if logged in via the HuggingFace CLI, unless the upstream space is private. Obtain from: https://huggingface.co/settings/token + private: Whether the space hosting the discord bot is private. The visibility of the discord bot itself is set via the discord website. See https://huggingface.co/spaces/freddyaboulton/test-discord-bot-v1 + """ + + if self.config["mode"] == "chat_interface" and not api_names: + api_names = [("chat", "chat")] + + valid_list = isinstance(api_names, list) and ( + isinstance(n, str) + or ( + isinstance(n, tuple) and isinstance(n[0], str) and isinstance(n[1], str) + ) + for n in api_names + ) + if api_names is None or not valid_list: + raise ValueError( + f"Each entry in api_names must be either a string or a tuple of strings. Received {api_names}" + ) + assert ( + len(api_names) == 1 + ), "Currently only one api_name can be deployed to discord." + + for i, name in enumerate(api_names): + if isinstance(name, str): + api_names[i] = (name, name) + + fn = next( + (ep for ep in self.endpoints if ep.api_name == f"/{api_names[0][0]}"), None + ) + if not fn: + raise ValueError( + f"api_name {api_names[0][0]} not present in {self.space_id or self.src}" + ) + inputs = [ + inp for inp in fn.input_component_types if fn not in utils.SKIP_COMPONENTS + ] + outputs = [ + inp for inp in fn.input_component_types if fn not in utils.SKIP_COMPONENTS + ] + if not inputs == ["textbox"] and outputs == ["textbox"]: + raise ValueError( + "Currently only api_names with a single textbox as input and output are supported. 
" + f"Received {inputs} and {outputs}" + ) + + is_private = False + if self.space_id: + is_private = huggingface_hub.space_info(self.space_id).private + if is_private: + assert hf_token, ( + f"Since {self.space_id} is private, you must explicitly pass in hf_token " + "so that it can be added as a secret in the discord bot space." + ) + + if to_id: + if "/" in to_id: + to_id = to_id.split("/")[1] + space_id = huggingface_hub.get_full_repo_name(to_id, token=hf_token) + else: + if self.space_id: + space_id = f'{self.space_id.split("/")[1]}-gradio-discord-bot' + else: + space_id = f"gradio-discord-bot-{secrets.token_hex(4)}" + space_id = huggingface_hub.get_full_repo_name(space_id, token=hf_token) + + api = huggingface_hub.HfApi() + + try: + huggingface_hub.space_info(space_id) + first_upload = False + except huggingface_hub.utils.RepositoryNotFoundError: + first_upload = True + + huggingface_hub.create_repo( + space_id, + repo_type="space", + space_sdk="gradio", + token=hf_token, + exist_ok=True, + private=private, + ) + if first_upload: + huggingface_hub.metadata_update( + repo_id=space_id, + repo_type="space", + metadata={"tags": ["gradio-discord-bot"]}, + ) + + with open(str(Path(__file__).parent / "templates" / "discord_chat.py")) as f: + app = f.read() + app = app.replace("<>", self.src) + app = app.replace("<>", api_names[0][0]) + app = app.replace("<>", api_names[0][1]) + + with tempfile.NamedTemporaryFile(mode="w", delete=False) as app_file: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as requirements: + app_file.write(app) + requirements.write("\n".join(["discord.py==2.3.1"])) + + operations = [ + CommitOperationAdd(path_in_repo="app.py", path_or_fileobj=app_file.name), + CommitOperationAdd( + path_in_repo="requirements.txt", path_or_fileobj=requirements.name + ), + ] + + api.create_commit( + repo_id=space_id, + commit_message="Deploy Discord Bot", + repo_type="space", + operations=operations, + token=hf_token, + ) + + if discord_bot_token: + 
huggingface_hub.add_space_secret( + space_id, "DISCORD_TOKEN", discord_bot_token, token=hf_token + ) + if is_private: + huggingface_hub.add_space_secret( + space_id, "HF_TOKEN", hf_token, token=hf_token + ) + + url = f"https://huggingface.co/spaces/{space_id}" + print(f"See your discord bot here! {url}") + return url + + +class Endpoint: + """Helper class for storing all the information about a single API endpoint.""" + + def __init__(self, client: Client, fn_index: int, dependency: dict): + self.client: Client = client + self.fn_index = fn_index + self.dependency = dependency + api_name = dependency.get("api_name") + self.api_name: str | Literal[False] | None = ( + "/" + api_name if isinstance(api_name, str) else api_name + ) + self.use_ws = self._use_websocket(self.dependency) + self.input_component_types = [] + self.output_component_types = [] + self.root_url = client.src + "/" if not client.src.endswith("/") else client.src + self.is_continuous = dependency.get("types", {}).get("continuous", False) + try: + # Only a real API endpoint if backend_fn is True (so not just a frontend function), serializers are valid, + # and api_name is not False (meaning that the developer has explicitly disabled the API endpoint) + self.serializers, self.deserializers = self._setup_serializers() + self.is_valid = self.dependency["backend_fn"] and self.api_name is not False + except AssertionError: + self.is_valid = False + + def __repr__(self): + return f"Endpoint src: {self.client.src}, api_name: {self.api_name}, fn_index: {self.fn_index}" + + def __str__(self): + return self.__repr__() + + def make_end_to_end_fn(self, helper: Communicator | None = None): + _predict = self.make_predict(helper) + + def _inner(*data): + if not self.is_valid: + raise utils.InvalidAPIEndpointError() + data = self.insert_state(*data) + if self.client.serialize: + data = self.serialize(*data) + predictions = _predict(*data) + predictions = self.process_predictions(*predictions) + # Append final output 
only if not already present + # for consistency between generators and not generators + if helper: + with helper.lock: + if not helper.job.outputs: + helper.job.outputs.append(predictions) + return predictions + + return _inner + + def make_predict(self, helper: Communicator | None = None): + def _predict(*data) -> tuple: + data = json.dumps( + { + "data": data, + "fn_index": self.fn_index, + "session_hash": self.client.session_hash, + } + ) + hash_data = json.dumps( + { + "fn_index": self.fn_index, + "session_hash": self.client.session_hash, + } + ) + + if self.use_ws: + result = utils.synchronize_async(self._ws_fn, data, hash_data, helper) + if "error" in result: + raise ValueError(result["error"]) + else: + response = requests.post( + self.client.api_url, headers=self.client.headers, data=data + ) + result = json.loads(response.content.decode("utf-8")) + try: + output = result["data"] + except KeyError as ke: + is_public_space = ( + self.client.space_id + and not huggingface_hub.space_info(self.client.space_id).private + ) + if "error" in result and "429" in result["error"] and is_public_space: + raise utils.TooManyRequestsError( + f"Too many requests to the API, please try again later. To avoid being rate-limited, " + f"please duplicate the Space using Client.duplicate({self.client.space_id}) " + f"and pass in your Hugging Face token." + ) from None + elif "error" in result: + raise ValueError(result["error"]) from None + raise KeyError( + f"Could not find 'data' key in response. 
Response received: {result}" + ) from ke + return tuple(output) + + return _predict + + def _predict_resolve(self, *data) -> Any: + """Needed for gradio.load(), which has a slightly different signature for serializing/deserializing""" + outputs = self.make_predict()(*data) + if len(self.dependency["outputs"]) == 1: + return outputs[0] + return outputs + + def _upload( + self, file_paths: list[str | list[str]] + ) -> list[str | list[str]] | list[dict[str, Any] | list[dict[str, Any]]]: + if not file_paths: + return [] + # Put all the filepaths in one file + # but then keep track of which index in the + # original list they came from so we can recreate + # the original structure + files = [] + indices = [] + for i, fs in enumerate(file_paths): + if not isinstance(fs, list): + fs = [fs] + for f in fs: + files.append(("files", (Path(f).name, open(f, "rb")))) # noqa: SIM115 + indices.append(i) + r = requests.post( + self.client.upload_url, headers=self.client.headers, files=files + ) + if r.status_code != 200: + uploaded = file_paths + else: + uploaded = [] + result = r.json() + for i, fs in enumerate(file_paths): + if isinstance(fs, list): + output = [o for ix, o in enumerate(result) if indices[ix] == i] + res = [ + { + "is_file": True, + "name": o, + "orig_name": Path(f).name, + "data": None, + } + for f, o in zip(fs, output) + ] + else: + o = next(o for ix, o in enumerate(result) if indices[ix] == i) + res = { + "is_file": True, + "name": o, + "orig_name": Path(fs).name, + "data": None, + } + uploaded.append(res) + return uploaded + + def _add_uploaded_files_to_data( + self, + files: list[str | list[str]] | list[dict[str, Any] | list[dict[str, Any]]], + data: list[Any], + ) -> None: + """Helper function to modify the input data with the uploaded files.""" + file_counter = 0 + for i, t in enumerate(self.input_component_types): + if t in ["file", "uploadbutton"]: + data[i] = files[file_counter] + file_counter += 1 + + def insert_state(self, *data) -> tuple: + data = 
list(data) + for i, input_component_type in enumerate(self.input_component_types): + if input_component_type == utils.STATE_COMPONENT: + data.insert(i, None) + return tuple(data) + + def remove_skipped_components(self, *data) -> tuple: + data = [ + d + for d, oct in zip(data, self.output_component_types) + if oct not in utils.SKIP_COMPONENTS + ] + return tuple(data) + + def reduce_singleton_output(self, *data) -> Any: + if ( + len( + [ + oct + for oct in self.output_component_types + if oct not in utils.SKIP_COMPONENTS + ] + ) + == 1 + ): + return data[0] + else: + return data + + def serialize(self, *data) -> tuple: + assert len(data) == len( + self.serializers + ), f"Expected {len(self.serializers)} arguments, got {len(data)}" + + files = [ + f + for f, t in zip(data, self.input_component_types) + if t in ["file", "uploadbutton"] + ] + uploaded_files = self._upload(files) + data = list(data) + self._add_uploaded_files_to_data(uploaded_files, data) + o = tuple([s.serialize(d) for s, d in zip(self.serializers, data)]) + return o + + def deserialize(self, *data) -> tuple: + assert len(data) == len( + self.deserializers + ), f"Expected {len(self.deserializers)} outputs, got {len(data)}" + outputs = tuple( + [ + s.deserialize( + d, + save_dir=self.client.output_dir, + hf_token=self.client.hf_token, + root_url=self.root_url, + ) + for s, d in zip(self.deserializers, data) + ] + ) + return outputs + + def process_predictions(self, *predictions): + if self.client.serialize: + predictions = self.deserialize(*predictions) + predictions = self.remove_skipped_components(*predictions) + predictions = self.reduce_singleton_output(*predictions) + return predictions + + def _setup_serializers(self) -> tuple[list[Serializable], list[Serializable]]: + inputs = self.dependency["inputs"] + serializers = [] + + for i in inputs: + for component in self.client.config["components"]: + if component["id"] == i: + component_name = component["type"] + 
self.input_component_types.append(component_name) + if component.get("serializer"): + serializer_name = component["serializer"] + assert ( + serializer_name in serializing.SERIALIZER_MAPPING + ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." + serializer = serializing.SERIALIZER_MAPPING[serializer_name] + else: + assert ( + component_name in serializing.COMPONENT_MAPPING + ), f"Unknown component: {component_name}, you may need to update your gradio_client version." + serializer = serializing.COMPONENT_MAPPING[component_name] + serializers.append(serializer()) # type: ignore + + outputs = self.dependency["outputs"] + deserializers = [] + for i in outputs: + for component in self.client.config["components"]: + if component["id"] == i: + component_name = component["type"] + self.output_component_types.append(component_name) + if component.get("serializer"): + serializer_name = component["serializer"] + assert ( + serializer_name in serializing.SERIALIZER_MAPPING + ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." + deserializer = serializing.SERIALIZER_MAPPING[serializer_name] + elif component_name in utils.SKIP_COMPONENTS: + deserializer = serializing.SimpleSerializable + else: + assert ( + component_name in serializing.COMPONENT_MAPPING + ), f"Unknown component: {component_name}, you may need to update your gradio_client version." 
+ deserializer = serializing.COMPONENT_MAPPING[component_name] + deserializers.append(deserializer()) # type: ignore + + return serializers, deserializers + + def _use_websocket(self, dependency: dict) -> bool: + queue_enabled = self.client.config.get("enable_queue", False) + queue_uses_websocket = version.parse( + self.client.config.get("version", "2.0") + ) >= version.Version("3.2") + dependency_uses_queue = dependency.get("queue", False) is not False + return queue_enabled and queue_uses_websocket and dependency_uses_queue + + async def _ws_fn(self, data, hash_data, helper: Communicator): + async with websockets.connect( # type: ignore + self.client.ws_url, + open_timeout=10, + extra_headers=self.client.headers, + max_size=1024 * 1024 * 1024, + ) as websocket: + return await utils.get_pred_from_ws(websocket, data, hash_data, helper) + + +@document("result", "outputs", "status") +class Job(Future): + """ + A Job is a wrapper over the Future class that represents a prediction call that has been + submitted by the Gradio client. This class is not meant to be instantiated directly, but rather + is created by the Client.submit() method. + + A Job object includes methods to get the status of the prediction call, as well to get the outputs of + the prediction call. Job objects are also iterable, and can be used in a loop to get the outputs + of prediction calls as they become available for generator endpoints. 
+ """ + + def __init__( + self, + future: Future, + communicator: Communicator | None = None, + verbose: bool = True, + space_id: str | None = None, + ): + """ + Parameters: + future: The future object that represents the prediction call, created by the Client.submit() method + communicator: The communicator object that is used to communicate between the client and the background thread running the job + verbose: Whether to print any status-related messages to the console + space_id: The space ID corresponding to the Client object that created this Job object + """ + self.future = future + self.communicator = communicator + self._counter = 0 + self.verbose = verbose + self.space_id = space_id + + def __iter__(self) -> Job: + return self + + def __next__(self) -> tuple | Any: + if not self.communicator: + raise StopIteration() + + with self.communicator.lock: + if self.communicator.job.latest_status.code == Status.FINISHED: + raise StopIteration() + + while True: + with self.communicator.lock: + if len(self.communicator.job.outputs) == self._counter + 1: + o = self.communicator.job.outputs[self._counter] + self._counter += 1 + return o + if self.communicator.job.latest_status.code == Status.FINISHED: + raise StopIteration() + + def result(self, timeout: float | None = None) -> Any: + """ + Return the result of the call that the future represents. Raises CancelledError: If the future was cancelled, TimeoutError: If the future didn't finish executing before the given timeout, and Exception: If the call raised then that exception will be raised. + + Parameters: + timeout: The number of seconds to wait for the result if the future isn't done. If None, then there is no limit on the wait time. + Returns: + The result of the call that the future represents. For generator functions, it will return the final iteration. 
+ Example: + from gradio_client import Client + calculator = Client(src="gradio/calculator") + job = calculator.submit("foo", "add", 4, fn_index=0) + job.result(timeout=5) + >> 9 + """ + return super().result(timeout=timeout) + + def outputs(self) -> list[tuple | Any]: + """ + Returns a list containing the latest outputs from the Job. + + If the endpoint has multiple output components, the list will contain + a tuple of results. Otherwise, it will contain the results without storing them + in tuples. + + For endpoints that are queued, this list will contain the final job output even + if that endpoint does not use a generator function. + + Example: + from gradio_client import Client + client = Client(src="gradio/count_generator") + job = client.submit(3, api_name="/count") + while not job.done(): + time.sleep(0.1) + job.outputs() + >> ['0', '1', '2'] + """ + if not self.communicator: + return [] + else: + with self.communicator.lock: + return self.communicator.job.outputs + + def status(self) -> StatusUpdate: + """ + Returns the latest status update from the Job in the form of a StatusUpdate + object, which contains the following fields: code, rank, queue_size, success, time, eta, and progress_data. + + progress_data is a list of updates emitted by the gr.Progress() tracker of the event handler. Each element + of the list has the following fields: index, length, unit, progress, desc. If the event handler does not have + a gr.Progress() tracker, the progress_data field will be None. 
+ + Example: + from gradio_client import Client + client = Client(src="gradio/calculator") + job = client.submit(5, "add", 4, api_name="/predict") + job.status() + >> + job.status().eta + >> 43.241 # seconds + """ + time = datetime.now() + cancelled = False + if self.communicator: + with self.communicator.lock: + cancelled = self.communicator.should_cancel + if cancelled: + return StatusUpdate( + code=Status.CANCELLED, + rank=0, + queue_size=None, + success=False, + time=time, + eta=None, + progress_data=None, + ) + if self.done(): + if not self.future._exception: # type: ignore + return StatusUpdate( + code=Status.FINISHED, + rank=0, + queue_size=None, + success=True, + time=time, + eta=None, + progress_data=None, + ) + else: + return StatusUpdate( + code=Status.FINISHED, + rank=0, + queue_size=None, + success=False, + time=time, + eta=None, + progress_data=None, + ) + else: + if not self.communicator: + return StatusUpdate( + code=Status.PROCESSING, + rank=0, + queue_size=None, + success=None, + time=time, + eta=None, + progress_data=None, + ) + else: + with self.communicator.lock: + eta = self.communicator.job.latest_status.eta + if self.verbose and self.space_id and eta and eta > 30: + print( + f"Due to heavy traffic on this app, the prediction will take approximately {int(eta)} seconds." + f"For faster predictions without waiting in queue, you may duplicate the space using: Client.duplicate({self.space_id})" + ) + return self.communicator.job.latest_status + + def __getattr__(self, name): + """Forwards any properties to the Future class.""" + return getattr(self.future, name) + + def cancel(self) -> bool: + """Cancels the job as best as possible. + + If the app you are connecting to has the gradio queue enabled, the job + will be cancelled locally as soon as possible. For apps that do not use the + queue, the job cannot be cancelled if it's been sent to the local executor + (for the time being). 
+ + Note: In general, this DOES not stop the process from running in the upstream server + except for the following situations: + + 1. If the job is queued upstream, it will be removed from the queue and the server will not run the job + 2. If the job has iterative outputs, the job will finish as soon as the current iteration finishes running + 3. If the job has not been picked up by the queue yet, the queue will not pick up the job + """ + if self.communicator: + with self.communicator.lock: + self.communicator.should_cancel = True + return True + return self.future.cancel() diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/data_classes.py b/testbed/gradio-app__gradio/client/python/gradio_client/data_classes.py new file mode 100644 index 0000000000000000000000000000000000000000..bfd86651167d15d20d386c839e9f32b1662461b0 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/data_classes.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +from typing import TypedDict + +from typing_extensions import NotRequired + + +class FileData(TypedDict): + name: str | None # filename + data: str | None # base64 encoded data + size: NotRequired[int | None] # size in bytes + is_file: NotRequired[ + bool + ] # whether the data corresponds to a file or base64 encoded data + orig_name: NotRequired[str] # original filename + mime_type: NotRequired[str] + is_stream: NotRequired[bool] diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/documentation.py b/testbed/gradio-app__gradio/client/python/gradio_client/documentation.py new file mode 100644 index 0000000000000000000000000000000000000000..4d8d41ddf85f5a860956dad6256af90fe6c7f483 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/documentation.py @@ -0,0 +1,266 @@ +"""Contains methods that generate documentation for Gradio functions and classes.""" + +from __future__ import annotations + +import inspect +from typing import Callable + 
+classes_to_document = {} +classes_inherit_documentation = {} +documentation_group = None + + +def set_documentation_group(m): + global documentation_group + documentation_group = m + if m not in classes_to_document: + classes_to_document[m] = [] + + +def extract_instance_attr_doc(cls, attr): + code = inspect.getsource(cls.__init__) + lines = [line.strip() for line in code.split("\n")] + i = None + for i, line in enumerate(lines): # noqa: B007 + if line.startswith("self." + attr + ":") or line.startswith( + "self." + attr + " =" + ): + break + assert i is not None, f"Could not find {attr} in {cls.__name__}" + start_line = lines.index('"""', i) + end_line = lines.index('"""', start_line + 1) + for j in range(i + 1, start_line): + assert not lines[j].startswith("self."), ( + f"Found another attribute before docstring for {attr} in {cls.__name__}: " + + lines[j] + + "\n start:" + + lines[i] + ) + doc_string = " ".join(lines[start_line + 1 : end_line]) + return doc_string + + +def document(*fns, inherit=False): + """ + Defines the @document decorator which adds classes or functions to the Gradio + documentation at www.gradio.app/docs. + + Usage examples: + - Put @document() above a class to document the class and its constructor. + - Put @document("fn1", "fn2") above a class to also document methods fn1 and fn2. + - Put @document("*fn3") with an asterisk above a class to document the instance attribute methods f3. + """ + + def inner_doc(cls): + global documentation_group + if inherit: + classes_inherit_documentation[cls] = None + classes_to_document[documentation_group].append((cls, fns)) + return cls + + return inner_doc + + +def document_fn(fn: Callable, cls) -> tuple[str, list[dict], dict, str | None]: + """ + Generates documentation for any function. 
+ Parameters: + fn: Function to document + Returns: + description: General description of fn + parameters: A list of dicts for each parameter, storing data for the parameter name, annotation and doc + return: A dict storing data for the returned annotation and doc + example: Code for an example use of the fn + """ + doc_str = inspect.getdoc(fn) or "" + doc_lines = doc_str.split("\n") + signature = inspect.signature(fn) + description, parameters, returns, examples = [], {}, [], [] + mode = "description" + for line in doc_lines: + line = line.rstrip() + if line == "Parameters:": + mode = "parameter" + elif line.startswith("Example:"): + mode = "example" + if "(" in line and ")" in line: + c = line.split("(")[1].split(")")[0] + if c != cls.__name__: + mode = "ignore" + elif line == "Returns:": + mode = "return" + else: + if mode == "description": + description.append(line if line.strip() else "
") + continue + if not (line.startswith(" ") or line.strip() == ""): + print(line) + assert ( + line.startswith(" ") or line.strip() == "" + ), f"Documentation format for {fn.__name__} has format error in line: {line}" + line = line[4:] + if mode == "parameter": + colon_index = line.index(": ") + assert ( + colon_index > -1 + ), f"Documentation format for {fn.__name__} has format error in line: {line}" + parameter = line[:colon_index] + parameter_doc = line[colon_index + 2 :] + parameters[parameter] = parameter_doc + elif mode == "return": + returns.append(line) + elif mode == "example": + examples.append(line) + description_doc = " ".join(description) + parameter_docs = [] + for param_name, param in signature.parameters.items(): + if param_name.startswith("_"): + continue + if param_name in ["kwargs", "args"] and param_name not in parameters: + continue + parameter_doc = { + "name": param_name, + "annotation": param.annotation, + "doc": parameters.get(param_name), + } + if param_name in parameters: + del parameters[param_name] + if param.default != inspect.Parameter.empty: + default = param.default + if type(default) == str: + default = '"' + default + '"' + if default.__class__.__module__ != "builtins": + default = f"{default.__class__.__name__}()" + parameter_doc["default"] = default + elif parameter_doc["doc"] is not None: + if "kwargs" in parameter_doc["doc"]: + parameter_doc["kwargs"] = True + if "args" in parameter_doc["doc"]: + parameter_doc["args"] = True + parameter_docs.append(parameter_doc) + assert ( + len(parameters) == 0 + ), f"Documentation format for {fn.__name__} documents nonexistent parameters: {''.join(parameters.keys())}" + if len(returns) == 0: + return_docs = {} + elif len(returns) == 1: + return_docs = {"annotation": signature.return_annotation, "doc": returns[0]} + else: + return_docs = {} + # raise ValueError("Does not support multiple returns yet.") + examples_doc = "\n".join(examples) if len(examples) > 0 else None + return 
description_doc, parameter_docs, return_docs, examples_doc + + +def document_cls(cls): + doc_str = inspect.getdoc(cls) + if doc_str is None: + return "", {}, "" + tags = {} + description_lines = [] + mode = "description" + for line in doc_str.split("\n"): + line = line.rstrip() + if line.endswith(":") and " " not in line: + mode = line[:-1].lower() + tags[mode] = [] + elif line.split(" ")[0].endswith(":") and not line.startswith(" "): + tag = line[: line.index(":")].lower() + value = line[line.index(":") + 2 :] + tags[tag] = value + else: + if mode == "description": + description_lines.append(line if line.strip() else "
") + else: + assert ( + line.startswith(" ") or not line.strip() + ), f"Documentation format for {cls.__name__} has format error in line: {line}" + tags[mode].append(line[4:]) + if "example" in tags: + example = "\n".join(tags["example"]) + del tags["example"] + else: + example = None + for key, val in tags.items(): + if isinstance(val, list): + tags[key] = "
".join(val) + description = " ".join(description_lines).replace("\n", "
") + return description, tags, example + + +def generate_documentation(): + documentation = {} + for mode, class_list in classes_to_document.items(): + documentation[mode] = [] + for cls, fns in class_list: + fn_to_document = cls if inspect.isfunction(cls) else cls.__init__ + _, parameter_doc, return_doc, _ = document_fn(fn_to_document, cls) + cls_description, cls_tags, cls_example = document_cls(cls) + cls_documentation = { + "class": cls, + "name": cls.__name__, + "description": cls_description, + "tags": cls_tags, + "parameters": parameter_doc, + "returns": return_doc, + "example": cls_example, + "fns": [], + } + for fn_name in fns: + instance_attribute_fn = fn_name.startswith("*") + if instance_attribute_fn: + fn_name = fn_name[1:] + # Instance attribute fns are classes + # whose __call__ method determines their behavior + fn = getattr(cls(), fn_name).__call__ + else: + fn = getattr(cls, fn_name) + if not callable(fn): + description_doc = str(fn) + parameter_docs = {} + return_docs = {} + examples_doc = "" + override_signature = f"gr.{cls.__name__}.{fn_name}" + else: + ( + description_doc, + parameter_docs, + return_docs, + examples_doc, + ) = document_fn(fn, cls) + override_signature = None + if instance_attribute_fn: + description_doc = extract_instance_attr_doc(cls, fn_name) + cls_documentation["fns"].append( + { + "fn": fn, + "name": fn_name, + "description": description_doc, + "tags": {}, + "parameters": parameter_docs, + "returns": return_docs, + "example": examples_doc, + "override_signature": override_signature, + } + ) + documentation[mode].append(cls_documentation) + if cls in classes_inherit_documentation: + classes_inherit_documentation[cls] = cls_documentation["fns"] + for mode, class_list in classes_to_document.items(): + for i, (cls, _) in enumerate(class_list): + for super_class in classes_inherit_documentation: + if ( + inspect.isclass(cls) + and issubclass(cls, super_class) + and cls != super_class + ): + for inherited_fn in 
classes_inherit_documentation[super_class]: + inherited_fn = dict(inherited_fn) + try: + inherited_fn["description"] = extract_instance_attr_doc( + cls, inherited_fn["name"] + ) + except (ValueError, AssertionError): + pass + documentation[mode][i]["fns"].append(inherited_fn) + return documentation diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/media_data.py b/testbed/gradio-app__gradio/client/python/gradio_client/media_data.py new file mode 100644 index 0000000000000000000000000000000000000000..ecbb7442a6c7c1a13f24418bea01e74aeee4d033 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/media_data.py @@ -0,0 +1,8655 @@ +BASE64_IMAGE = ( # test/test_files/bus.png + "data:image/png;base64," + "R0lGODlhPQBEAPeoAJosM//AwO/AwHVYZ/z595kzAP/s7P+goOXMv8+fhw/v739/f+8PD98fH/8mJl+fn/9ZWb8/PzWlwv///6wWGbImAPgTEMImIN9gUFCEm/gDALULDN8PAD6atYdCTX9gUNKlj8wZAKUsAOzZz+UMAOsJAP/Z2ccMDA8PD/95eX5NWvsJCOVNQPtfX/8zM8+QePLl38MGBr8JCP+zs9myn/8GBqwpAP/GxgwJCPny78lzYLgjAJ8vAP9fX/+MjMUcAN8zM/9wcM8ZGcATEL+QePdZWf/29uc/P9cmJu9MTDImIN+/r7+/vz8/P8VNQGNugV8AAF9fX8swMNgTAFlDOICAgPNSUnNWSMQ5MBAQEJE3QPIGAM9AQMqGcG9vb6MhJsEdGM8vLx8fH98AANIWAMuQeL8fABkTEPPQ0OM5OSYdGFl5jo+Pj/+pqcsTE78wMFNGQLYmID4dGPvd3UBAQJmTkP+8vH9QUK+vr8ZWSHpzcJMmILdwcLOGcHRQUHxwcK9PT9DQ0O/v70w5MLypoG8wKOuwsP/g4P/Q0IcwKEswKMl8aJ9fX2xjdOtGRs/Pz+Dg4GImIP8gIH0sKEAwKKmTiKZ8aB/f39Wsl+LFt8dgUE9PT5x5aHBwcP+AgP+WltdgYMyZfyywz78AAAAAAAD///8AAP9mZv///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAKgALAAAAAA9AEQAAAj/AFEJHEiwoMGDCBMqXMiwocAbBww4nEhxoYkUpzJGrMixogkfGUNqlNixJEIDB0SqHGmyJSojM1bKZOmyop0gM3Oe2liTISKMOoPy7GnwY9CjIYcSRYm0aVKSLmE6nfq05QycVLPuhDrxBlCtYJUqNAq2bNWEBj6ZXRuyxZyDRtqwnXvkhACDV+euTeJ
m1Ki7A73qNWtFiF+/gA95Gly2CJLDhwEHMOUAAuOpLYDEgBxZ4GRTlC1fDnpkM+fOqD6DDj1aZpITp0dtGCDhr+fVuCu3zlg49ijaokTZTo27uG7Gjn2P+hI8+PDPERoUB318bWbfAJ5sUNFcuGRTYUqV/3ogfXp1rWlMc6awJjiAAd2fm4ogXjz56aypOoIde4OE5u/F9x199dlXnnGiHZWEYbGpsAEA3QXYnHwEFliKAgswgJ8LPeiUXGwedCAKABACCN+EA1pYIIYaFlcDhytd51sGAJbo3onOpajiihlO92KHGaUXGwWjUBChjSPiWJuOO/LYIm4v1tXfE6J4gCSJEZ7YgRYUNrkji9P55sF/ogxw5ZkSqIDaZBV6aSGYq/lGZplndkckZ98xoICbTcIJGQAZcNmdmUc210hs35nCyJ58fgmIKX5RQGOZowxaZwYA+JaoKQwswGijBV4C6SiTUmpphMspJx9unX4KaimjDv9aaXOEBteBqmuuxgEHoLX6Kqx+yXqqBANsgCtit4FWQAEkrNbpq7HSOmtwag5w57GrmlJBASEU18ADjUYb3ADTinIttsgSB1oJFfA63bduimuqKB1keqwUhoCSK374wbujvOSu4QG6UvxBRydcpKsav++Ca6G8A6Pr1x2kVMyHwsVxUALDq/krnrhPSOzXG1lUTIoffqGR7Goi2MAxbv6O2kEG56I7CSlRsEFKFVyovDJoIRTg7sugNRDGqCJzJgcKE0ywc0ELm6KBCCJo8DIPFeCWNGcyqNFE06ToAfV0HBRgxsvLThHn1oddQMrXj5DyAQgjEHSAJMWZwS3HPxT/QMbabI/iBCliMLEJKX2EEkomBAUCxRi42VDADxyTYDVogV+wSChqmKxEKCDAYFDFj4OmwbY7bDGdBhtrnTQYOigeChUmc1K3QTnAUfEgGFgAWt88hKA6aCRIXhxnQ1yg3BCayK44EWdkUQcBByEQChFXfCB776aQsG0BIlQgQgE8qO26X1h8cEUep8ngRBnOy74E9QgRgEAC8SvOfQkh7FDBDmS43PmGoIiKUUEGkMEC/PJHgxw0xH74yx/3XnaYRJgMB8obxQW6kL9QYEJ0FIFgByfIL7/IQAlvQwEpnAC7DtLNJCKUoO/w45c44GwCXiAFB/OXAATQryUxdN4LfFiwgjCNYg+kYMIEFkCKDs6PKAIJouyGWMS1FSKJOMRB/BoIxYJIUXFUxNwoIkEKPAgCBZSQHQ1A2EWDfDEUVLyADj5AChSIQW6gu10bE/JG2VnCZGfo4R4d0sdQoBAHhPjhIB94v/wRoRKQWGRHgrhGSQJxCS+0pCZbEhAAOw==" +) +BASE64_AUDIO = { + "name": "test/test_files/audio_sample.wav", + "data": 
"data:audio/wav;base64,UklGRuI/AABXQVZFZm10IBAAAAABAAEAQB8AAIA+AAACABAAZGF0Ydw+AACO/w//5P6R/9D/SgDJAGIAegA3ALkAPAC8/zEA4/+G/8X/3//f/+n/jv+d/87/mP+p/7v/jv/C/ygAogB+AOQAHADX/1EAQwCz//T/kv/B/oD/rf8VABUAKAA3ANv/4P/o/8T/5/8o/6P/dgDDADcBUwCu/w3/+f5Z/5L/YQCfAMsAaAGxAXgAg//m/lT+Rf6k/lQA8wAXAR0BtwD1AF4Amf8g/xX/Tf/8/rb/FQDc/6sA6wAJAeIABQEyADn/af7D/b7+Mv8nALwAdAFAAooBswAKAEz/4v66/nb/KAAlAEoAQwBIAM//qf85AGAAeP+z/5f/n/8rAOL/MwBkAMsACwHxANUAjP8B/w7/2/7X/vj+TgDp/0MA5wDRAOMA5v+Q/+n/1/+C/zL/qf/y/yMAhQBEAEAAyf9A/23/JQCZ/5EArgDkAGMAmP/o/9b+Hv9O/8f/mQCdAIwAYwDX/3T/5v7//8r/PQCNAMIAvADq/4//SP8yAMP/1v/t/67/AgBaADwAAQD+/4YAZQDmAHAAgf+S/0D/D/94/7oA1QDaAMoAQgEFAX0A+v+S/i3+lP4o/ycACQBlAMQALAHxAJb/ZQBV/4T/z/8HAMUADgEuASQANwCCAD8A2/9e/wz/O/8u//T/+////ysATABVACABbQAwAMX/tf44/93+vf8IAHEAJAGnATYBoQCn/3j/VP65/vz///83AE8AeQDD//X/b/9RAMz/vwBmANP/dQAaAKT/vP/X/57/xP9B/1H/Bv+nAPgALwF3AY8BFQDe/9f+tv73/qT+hgBPAPcAOgAoAC8Akv/C/3YAaP/3/1//d/+6/6b/TQCAAPMAtgC5AN7/dv/s/fj+Ov/6/+8AfAGQAagB1gBV//3+kf7R/oH+jv/H/3AAdgCYABAAowDK/97/uwAEAJEA3v8SAJ3/b/8vAO3/8f+QAFT/OgCCAEkAKwAFAKL/Qv/S/4//yP/s/2wAPQB3AF4AlAAXAAsAZP+a//b/rv8ZAOb/EgCt//z/sQAlAC0AJwHs/1D/G/68/k3/z/+TAfgAewE7AvwA8v+Y/nn+7P7E/YMAmwDQAIABYwBxAEYAHwBrAIP/Rv9m/9f+GwBH/7j/0wCVAfgBCAHJ/8f/s/7+/rb/BP+v/zMAzgDa/+T/twAfAKD+7f91/+f/sQDq/6H/AACZANAAfgD1/+n/aP6h/9X+uP4CAHkAqAGBAT8BkgHZ/33/Df9j/jD/PP/HAI4AIwChAKsApv+3/yD/kv/+/x8A+/8v/xsASgBbAIcAdADy/4YAaP/w/8v/T//U/zkA2P+dADQBdAAqAP3+bP/P//r/i/+M/in/bQAaAEQBhwDsAJcAXf+o/+T+TP/A/1cANgCIAI0AJQHK/53/AwCqAEQBWAD6/8X/dv/L/83/q/9rAFsA/ABPAMf/xf5K/+7+Sf9nAPwAjAGYAA8Ar/+b/5L/kf8m/z8Ad/83AVgA2P/cAJn/VwDG/6P/gP8Z/z7/XP/P/oUA7P9XAK4AKwCNAKn/Iv9YAAUA3P8DACoAPgC8/moAFgA1ANEA9P/r/7IAxP/c/kD/vv9cAEoArAFmAVEAagBJABj/yf+X/z8AGABY/2kA2f85AC4APP+c/+f/yf8T/+r+bgCu/x8AJgKUAbMBTAI6AGv/TP7//X7+vv7sAL//bAEnAoYATgCt/+n/Uv9w/tP+j/6i/0YAUAA8AXgBIQJEAfL/Cf6a/if/iP9bADsBugLiAiMBVv/e/r3+EP7s/Xr/qP9z/4AAQwCk/7MAlwDoAOgA6f+A/+n+D/9E/if/BwHTABIC2gGEADMAUf9P/3D+lv7F/sv/6QBPACQAWwDgANn/2f8I/z7/7P96/lr+vABgAWYBEgJaAT8Asf/N/3n+FP6N/kP/mADsARIB7AC4AIX/k
v54/v3/BQDf/0sAKQCqAGEATP8jAMr/7ADtALL/9f6k/pT+vv7t/84AyAG7AQECJwDG/7n+d/2X/uD/6QBKAZ8BOgGbAAwACv/f/goAsP+d/2z/QQFJAML/uP/Z/xABmf8LAE8AEgCM/wn/c/99/04AgQHG/5IBOwFrAGABOAC+/+/+5v6W/j/+qf/mAGX/9AC/AHb/i/8g/6z/n//J/2wAiABZAZABiADBAMP//f8PAE4AEgAvAPH+jv7A/+n/OgDk/4wAKAAVAJUAj/99/tP+Mf4AAMgBGAFZAZUBhwCh/2b/Y/+C/2f/6v8X/3n/+v7A/mkAr/8ZAF8B/wDBAPH/8P/o/9j/TACr/wwAZgC8////3f+4/mz/XgCF/9D/XwA2/6v/pv/3/1YA1QDmAFQAnABDALX/NQDx/zEAewFfALsAVwCH/77/7/5m/9D/Qv/k/4n/7v7S/n79tv/DACEALAHaAacBugDfAJIA7v+x/+X/EP+d/+j/2P8LAMH/Iv8PABcAlP/I//D+VwDS/mT/jwB4APUAwAC5AD0BAP+PAGsAIP8gAaT/sAAqAL8A9AAG//n/SABU/nX/uv/p/37/gP85AMX/aQBMAMn/Mf9vAOb//QBHAPn/hgDi/ykAGv9h/kAAqwCU/wAAZQBgART/i/+F/5D+YP9wABoAUABNAe8AcwCbAK4A8f+oALYAkP89/8f/7f7+/8b+Tf+yAPX/CAEHAaz/ywAbAXv/Kf/R/5EA2f9uAQAANf+5AKkAZf9T/xABLwB0/yoAIgAKACsAGP+B/93/mf+6/+r/bP9s/in/fwB5APAAKgEvAdIBTgBsAFMAMf+3/s/+GAAWAL0AQAEFAH3/cf8aAMj/tP9+/+D+lwDsANP/mP+DALH/pf+MALQAwgDlAAwAbf/5/00A5/99/1AAZv9q/8H/0P6+/vj+4/9hAdb/xwDQAIX/zP7e/uD/I/+T/0QBOQCtAE8B3v6DANb/Dv9T/1YA2P9p/4QAngF0AfcARwBD/9wAGP8u/yv/z/7T//b/yf9vAKIBlAALAHEB3v+8/s7/H/70/LD+FAGGALcBZwIeAbkA2gBB/2H+0P5V/93/ZwC2AVL/uP+o/yj/r/+6/p//hf/K/qYBKwIoAUIA8wD8/zD/ggDC/tr+2v7d/9r/RQE5AgEA7f+TAcn/Xv8AAB0AlP65/hUB5v8nAU4CBwAI/xgAU/5i/oz+6v6u/7sBCgKuAQ0BkAD1/rT/R/8+/mkA0f1n/4cA9gDLAKgB3gBg/1cA6wCX/lT+AQAG/m7/FgGo/xAAeAExALcAbf+//x7/Uf8pANf/QgCbABcB8QCyABD/rQDQ/gH/9f9F/mcAbQC4/14AtQA1AW7/LP+OAGT+9gDsAEb/BwEbAMoABAHS//z/g/9i//T+qv0AAOv/b/+QAKj/2gDKAScAdQHl/0YAEQDn/+kAzf6xAEgANwAGAGYAOf+D/zUAdP6R/6r/W/8oALz/UQErAKEAGQHv/jQAQf/B/2X/CAA6ALcAjAGAAHD/NwGsAHQAAP++/r//Yv6J/+j+zv9T/0YARgFHARgA7wAdAIT/RwCe/yEAQgAuALT/FwCYARMAV/9pATf/XwD+//f/F//V/yb/fv8FAPf/dQCP/xsAMv/mAOH/lAA5AXT/Vv4/Avb/n/8mAcEAhP9i/+3/4P24/8H/JP+g/iQCZf/wAD4B1P88AJgAXQDY/oj/QQCQANn+UwCd/5gB//9o/w8Apv8n/4X/t//j/4sA1P+oAMf/UQFv/zn/sgAtAFMAogDm/4oAkADBALD+5P4qAWz+bwCI//P/0/5n/1v/R/7R/5gAqQCvAGsBpQDyAAP/JQDr/9H/4P/8AB0A2ACBAGz/xv7U//H/cv/PATD/6/5p/44Aef+c/0gAhQBOALYAif/O/0YB3QD7/4IBggBKANcAhP5CAF79qf9H/4n/yQKd/2sAMQC2/uf/y/79/yAAh/+oAF8B5QCG/5L/b
/8YAB7/pgEV/xn/3gD9/sf/TP+M/0oB0AAUACX/Af97AQL/Sv/F/3UAqwDbACMAWQEGAPP/LgGe/3MAcf+7/ZP9X/7t/f7+0v6lAiQBhwI1Az4A0v4//3v/Vv97ABQAKwFw/+8B+f5m/y3/Vv6vALwAHwG6/qb9VP8y/lj+WwBOAWcDfAGiAAsAFf8+/SL/of7l/5UC0gLHATwBYQCU/oT/GP67/sr/SwLI/3D+GAA1/13/uv81/iYBygHA/+L/tf/IAFD/EwHVALEA6wDbAM//fwAdAJr/3P86APf/DQEvAZn/NgBv/sH/Bf4YADL/d/7BAOD+3v95AmABEQAOAIf/5f+0/SUARwKy/zMBrgGz/1QBW/5g/6L/Gf9wAEr+GwEeAP79af9v/9D+4wAI/yEBwwAb/7MAC/8pAEUChwDwACQBnP8oAKH9mf/k/uL/MQFsAN0AQADV/yT/7P27//f+pf9NAPYA/QBcANgBgf7jAaf+7v+V/4v+cwBo/nMApAJtAV0AMf+zACQAAP4tAFT/oQCX/8MBLQEpAboAhv8Z/oj/H/+6/9n/mP8MAcL/PAIeAQQBMgHIAOP8xv5c/lf+dv36ASQCQQE0BJUANAH8/zEABP3t/yP/Tv9NANYA5v4CAEcAuP8EAQMAx/36/BwAwvwfAC8BOgOmAF8CCQGvAJ0A0/1J/Pv9mgCN/8cCHQHNAWMAKwH7/Yv/mv3W/nz8K/4QACIAUgKNAI8B6QE3A4r/JgD8/Ef/Gf2AAVsA2v6lAT4CDQHY/xwALv8s/uP85v/K/OUB1QCMAHoA1AOlAqX/uP+h/cP92v2a/qgA8P+PAZwEvv6QAsr9r/4d/lL+OACL/jEB2AESALH/3gIEACsBnwCbAf7+5/6q/u/+/v0VARcCNAEYApT/1gCu/Z7+CP7U/c7/bQH0/zwCFQH9AKYAh//YAPD+nf+3AO3/aP90AQAAwwJG/6QBz/9N/OT/Gv3a/HH/pv6jAOwBkwEtA37/YgF+/gz+hQBaALAABwME/58AVQGT/kQA5P2s//z+yf+UAIH/hgBKAFX+FALh/3UAK/+O//v8cP4WAkAAkQIyAQsDbwFMAhv/c/2J/Vr+qv2BAWUAJQAyAOL/WwDL/OUBGP50/r8AzwCOAPsDDgIXAX7/WwBt/7j7X/+b/Ab/pf/pACgB5AL4AL3/KwCJACoAwP5v/8n/YABF/rQAn/8iAgYAAQKZAFj+6QCI/q/85P8jAQcB4QDTANoCr/3F/7b8r/wv/8P/kADhAa0CTAKlAGsBvwHk/TP/6/83/sj+Cv+X/9oB5P+GAgEACP+5AEP9uPvy/p//lQF8AfoCjgNP/woCov4F/ff9R/+8/rcA2AAFA9cAKwDIAP39zgD//q/+l/26/2L+wQAkAX0DAwIGABID0/6r/QL+m/19/z//wP+UBIX+xQHv/qz/1ADT/jMCB/9VAKsAz/43/xYCu/7AAN//lgCY/u7+ov36/NYAtgKeAekArwSP/3j/zP65/hb+Zv+S//P/6v9iArkAhf5xAIz/NgH1AAYA9v7W/zL/GADn/sYDZf8tAXoCnf3+/5b95P6A/xL+rQDnAQQDrgHy/qgB6P0W/5T+ov5z/4ECAQGeAKABawG7/zz/IAE1/Yj/AQEq/vX/NQFh/5gBIQD7ATb8lQCnAHL80//UANcAbAAEAkIA1v9j/wD/M/4iAZv+agF6ACsA0P9dAdUABQAEAZr/CwI4/hb9q/qT/zz+xf8UArUElQCZAO8CA/7K/+z9RP+k/r8CsgE9ANn/HwJr/ff+1P70AUf/Jv0CAaf8+AIa/9AAUgCjALr/IAAP/zICav9t/20AiP9qAWb+2AFT/Rz+vgDiAY/7fgA3Adz+9QDsAJ4C9v/uAUUAeP8gAKb9Hfw3/wT/QwEqAVoBiQGlAO0AwQBk/s7+Uf8P/noBnv8jAwMBB/4aAYv9N//JACn9zwL8/kcB9wJo/5EC6/4w/joBWQDFAAUAV
vy6AKz9Xv5K/8D+YAICArH/AgRj/db/GP7//ZQC8P3YBZ8A7/+jALP/t/27/gL9vAAJAKQCAQEC/sQASv9R/vX+OAEA/3wDhP4mAgX9XwJw/6/+YQDW/gADK/4cAST+hP+6/UUDZgBr/z8AfQJC//MA7/8u/xH+P/76ATr8tgKG/tEAWgDOAu//m/9CAYv/5vzGAdcCMf8v/2wASwF//c4Ahvx0AFv9agLmACsAwAFEAjUA//6EAJD/PAAnARcCq/wTABIAA/1C/BsBnP10AlICegPz/wIAPAL4/N3/MQB2/REB5QFV/70A5PxpAwX+8/65ADgC8f4VAEX/xQF1AVn+6AEf/XwBxv5mAH4AE//k/YwC3P6eAG/9iP8XAwz/fgCvAvkBWABKAbP7AQGv+zoCWv9x/ywDa/2FACMB2PzzADUBAABmApn9HgNv/Jn+RAA+/bf/hQPk/jwDjAFE/0oBRPy1Af36b//AAggBeQAyAd7+6wFk/g7+ov8H/1sBZv5+AFoATwE8/m0CJf2VAen/jf87Auz8sP+U/6AA+v+bADQD9v/+/tcCgv1L/pL+Xf+X/WQBdf8FACMBMAGH/wD/qAIG/1H+7P+yARoBrwEW/xACMP8eASL+Ff7W/IX9UQHF/xwDkwNgAbEAuACn/cL+CABXAX/87ACUAesBxf5MAX//aP2ZAcf/6/9G/jkC/vwsAF0AswGK/00D4QBK/RAC+/2L/o398v6lAnsC7v/HAwf/RwGL/C4Be/5c/L4Asv/cAXYBvAA5/h8CY/4oAXH9XAHE/iL/YwAtAZL+2gJrAcT+VQMg/zYC/P04/+38ev9p/jX+mP2JA0ABXgBwAYf/CP8WAA3/3P8xANH/OgKc/Q4EcP7Z/pX/Ff/Q/d4Aov8WAZj/L/2wAQT/jwGD/x0BvgGH/1kANQJO/pv/i/0c/vcA+/6YAfsCJQGWAcT/JP8RAWf6RwAj/4f9YQJA/yYBkwAg/6sDjwDAANAAkfyfBKf9NP5CAeP9lv81AOb/PQI8/6z+DgCk/hgCWf5ZAG4BaADMAEgAP/7/AZb8qv83APT+tANT/6cBAQGT/1wAwwHl/AYAkwI3AL39pv2v/jX9Pf9i/6cBpwWCAw0DAQXDAKsBgP9T/UkCjP6b/hP+mf5A/0z5ifxmAEj7z/hr/mX5of6fBODxZwTiC/n7KgmSBAAKDQhb+3sKrgdg/Y4CiwEp/mz9oPzB+P/88ve/9OX9yvqZ+xH+Nv4GASgATQA0A0gC7QPoAVUEkgMWBK0BlwR/Az4CTwTAAdMARf+kBBr9KgDW/6QCoP/DANH/Yf5yAKb4e/zI+Vb4Dvvm+vz2cAOV/Cj7VQaJ/JQHgAgB+ikO5QUC/GgMxQOWBq8Fsfy/Clv/ge7vAhn5XfWI9FHxqQOC+GrxRgAOBFj+SgDCC84MkQhUCJEIOxAICGoBIAoeBjD/Iv+v/J39Evho9gL5rPVw/M33svZe+s36Zvqb+az+uPy7/k8AsgCQ/rgD8wNvAQcHagWmCOYEIATIBkEAcQK/AqkEvgGSA3QFLAEWAyL+oQC6+Xb9qP/D+Ir4Gf+/+Qn2lgBt+vD9PQC7/lEFEAR0//kI9QZyBogDwAPPCp8BgPVHAPMDlvIA9FP4Svy/9Ez0I/3r+2j7ePqBAFEEiQJ4BgoIkAyLC04Nqwz/Cw0JoQEqBfgBagAZ+1z9Hf0d+KD6Qvs19nv59vrk+B/6Wfrt/Bz4HP0d/b7/8ALY/jUDKASfA6kE2ADzA3ECNgE4B0gD1ASMBUIBNwLcB7r/kwFgBIL/oP/p/MT5oP7t+ivxu/2m/tf6BvqT/boDvv6i+gAJ0wfZAtMABQd5CjsD3v8YApsJkfqR/bj8KP8I9hbySvkW+v74s/Lx/Mf5UvvN/ywENAU1CVQJagoUEO0Lsgb3ByoI6QRmA/4CAgDT+jL8kfi5+lL3xft1+sb4QfsI+wH80/nM+2/9bf4y/BMErv2j/CwDsgMs/nAHy
wObAeQGJgLpBncBngMvB0ADRP+PBvgB5gAU/Wf+PgSBAhH6bfsWA074Avas+WH/rfki9o79xQTh/tT8/gS/COMDLQZMCe4JTgRM/s8Cx/4t/hH7yfs6/uv4mfWH9zv1V/Zp88/4kv7f/xoIugWpCX8LUQpHDVULDQnIClAFjwPBAiACKv8r/pX7N/+J/Zn2y/098wf1bPpn+DT6Mvtk/fX+//+i/WX/1ALO/fcBNQTT/5kDrQWKA5MCVgSnBnwFqPvDBMcGYAEa/7EEOAax/4T8hgDbA2z61PnQ+xwBtPeT9rH62v/5+BT5ggIGBR4EpgFgB8wGmwWMAwcGUAIFBXr/4QKs/V38n/ta94X2SPYR9+f1kvtb9Zj95/3QAK4CSQZNCLwLbQdJEugM+wPxDXgElgLKACYCVPxW/Sv6ZP1s+V35+/rz+Ln2lP2E/BL39/4y/AX+V/1WAisBEwHn+9D+QwXkAWz/2wTlB/sB+/7OBp0KowAHAPsFGgkvAJb9EAHlAWL7Y/o9AcoDBP9N+xz77/3D+Hj0bvyu+lv+Sv/bBXcD1ARmBOkF5QUQAzoGwQFEBb7+swDL/OX5APyW9371IvuC8x/5u/pu8cD/4P4t/90HwQVADVsO8AlNEHEIkQQiBG4EFv8fAjEBBQCq/Rb/yf3R+BT94vYz+iz2MPgHACT5F/WGAYUAUv8V++7/WAWK/OT/swK7BaQE2AHcBMQLpgAt/+cDywZzAcz94gckBf79nf07AqoAKf6k/E8BZf1k+6D5+Pcl+0r89/qk/TwE5P4zA/cBowEgB5cBPwYnB2ECJQhRA7b9v/6Z/kb77fho95n6H/bp87X5MPcw+5/7uwKZAlMDgAn9B/0JFQzjBzML8ws7Bi8G7AK1/5EAZP21+Cn+MPwh+vD0y/cYAUP2MfWkAI/+Sf5g94oBfwKg9xAAY/+VBg8Cx/47B2QGBAFB/yoCUAjlBKf92wU6BU7+TgN+/yoEgAAw/hwHDv+U/qf8CfuU+J/5KfnT+oL91vvZ+9gBwAAeA/0DqAMEBhMFDAfPAkkDeQAvCPUA5P4z/rL9+/uD9EL3sfXs9mz2evmD+Zv9+QN+BcYDCAsvCRoICwhVCpkISwKsCHMFSwVLAJoCRAKi+SD4DvmB/cb3mfV0/Kz/Sfzh+G0AE/0M+mb2ov7rAY797f9+AtkKY/4rAt8AoAXqBsv+uQQfBakB5wTPA6EE1gPN/y8Cmv9GAf77hACK+oD8xv3B/BH+uvsw+XT5kPkI/OD+jfxsAU8EVgmKAwYIMweyBmYB3gKx/gQBB/6B+6v/xfgU/gD27fly9S/18feL+GP7cwNNAOgDCwuID8cK7QeWDSELGwc0/gwHfwIEAov4bQGtAgT7Bfk0+s/9Fvai96b8kv10+UD8AfvZAM37qvp5/s0Fzv0dAJEE2wIIBo//twToA4UBDQJDBtICDQT9BOwDCP8HBNoBeQDl/wT+oAB6/F7///nb/nv4KPyP+Xf93P2N+UwANf/1AUYCYwcCB34HIQZ/BqkCOAH3/mb/U/6l/uj8P/zv+F745PXA72L6Hvzy+lT5GwKoDJMDkgC+C6sKTwbNBUQHUAyNBRcBBgUcBP3/Afyr/OH/3PiK89n9bf3297f4Xf3g/or74fsP+/D/Q/46/T3/UARk/0YB/QPEAJwEGgAvBvkDcADRBMkDvgG4ALcCBAV4AAgHwAL3AIf/TQD+/S751/r/9S7/RPY9/0P8Sfqu/Rj+zgCiABkFpQbuBQIGkAiLAzUItQFbAwwBNABW+9n/6vbo72H1Avr890ryTPsvAmsAp/u9BucHqwrWBEEKrQwxDCsD8whkB64BaQHK/7gBnvgd/FH3ngDf+JH4B/9p/ej5z/vp+637tPv1/PgBuv1m/yn+gAGP/vcAyQBpBaIAZgX4BYEBzQY9AYgE6wBCAfsEqAK1AZoCmP/fAzv9Wf29/Lz69fxD+4z79/pb+rf60fs//Ff9IwLpAm0Cl
wmZCOEFKQYhCE3/Y//SAQ8DFv7X+937C/7H+q3yy/aV+pP2j/EW/soFhQEKAgAJgwgpC/gFbAeNDGIGIwWnBNIHqwGV/ev97/0//mz6c/12/Qj5tPo8/A77o/iA/Db/1vfZ/rEA9/jx/LAD7P9lANgCLgX9BDr+0AOkADkE4gBTABsJ/QOVBeIETQOUA7P/mv+C//n/YAEoAej97vc9/Xz3BfgL92n4Z/0T+wsAqAIsCOQCSQblCbYECgKOBn4DBwKk/YYATwLv/Xv4Evow/CDzl/Mh9DD/tfUa/RIDGwFTBh0E2wc+CdEIjwnqBNcLKQbLAC4Fqv3jABUAqANX+/z/nPwd+Wf4cvZf/mv5evgJ/kj/IABC/pAAUv58/CcABv4oANf79AFyAxoEFQLKBScHXwR4AYQDjwSuAvACJwOp//IDSAZ7/CADvf7yAp74JPpH/Cf1YfuM9M35lwJp/7f9MQW3Bm4BKv7cA7oHPQPNAU8IVwQQBTP+JwA//yb6Zfob9aD7+/ON9/z3Cfsz/G798gWfBlcEWQkqBs4KZwesBLMIggE+BoMAlwTMAO8C+P4n/PD7Kvue++T31/qn+xQAtPx4/a8B5P2d+6H65/2f/xX5GwObAXr98gP9/7IBYQJfBUABvgI9BNkDsQTb/wwHKwPJAlABqQPZBz7+zAAr/3D6DP4p8qH3ofuj9qn3kv7OAjkA9ABCA9UJkP8wBu4DmQPuB639ZQXpA9kBi/6u+yv+H/UO9c35jPIg+Tj7gfsH/zf/pAfZAWgIkAasCsYDywnXBqYDGwl0AJwH7wBAApD6B/1N/qH5Qfe8/+b4b/yW/T7/3PwB/FT/Ifu8/jv6fQEm+7MC/f7jAfIBYgF8BF370AHNAoj+hQTHANIDlgn0A4kG7wFkB2gBaP4iBQgAcf7C/IT8lvts+2r2efso/cz5JPyO/iQGHP4YAL4E5gEcBlEDjAJmBdAFUwOsAkwAF/y2/EX4cfgX+VD2wPqc+Bf42/5n/4UDFf+GCJsESAJeDXoGQwb6BB0KXggmBdf/DAMU/b/9//pK+0z99Psy/U/5wf6q/xT/3/eO/zb5gP5g/Mv8Zf5y/vsAogFPBGn/cAMQ/McFdv6o/4kEYAXPA4IBlgWSCu8AUAKhB+L+UwS4+yoAdP1A/wX7R/tp+6/+j/Xi+wEAgPY9AJ0AOQFOAhAELABsBxMF0wq8AJQJaAQG/ocAgfhn+UP6gfqt95v8mvTg/WP3vf60/Q/7lASuBGsJewn6BhEM6QfE//gJpwNSAD0AKQIC/SsDMwH5/Xv8jvzt/aT3gvwB+U34AfyX+LD/pQNy+ysDvvuiAOf6Vf/O/nr9YAOdAOMC2waAB3AAUQYa/5AC6//gBPMAmwJVArAEBQS0A6wAlvzu/dP8cvuu9xv7hfef/Vz40v7B/BQEGgEbBVYGMwnjBOoBigOHAnQC9/l6BUL/Nf4R+9b+U/aI+Gv2Ivvc9gH9tvvj+5wHzQJ9BMAGIQqWAgsK6wTaCckC9QRh/+sAEAHZ/Vn+gQCd/Yj7MwE0+zkBBPYP+yD9Gv96+uX6NwCjAbD46/0hAtj8dwJg/Un8DAQ0BxT+GAh3ANQDMQA7Bl4Gmv4SCNoB7wImAoECigRKBwz7RQFy/av8lPbd9jH58fRi+37+7vsv/EoFU/xTBs0E7QKyBwkHMgOKBtoDeQbr/WkB0P4m92X8y/Sj9p/zffiG+Bf/mPz6BLP/KATOBRsEfgRCBW0IqAfwBlUHigvS/7kCH/9CARv8Wf3Y+jr+Zvq4/MD5/v6t/v/3lgLh/Oz/+/fg/mX5K/0J/SMDVwExAyIEsgLbA6/9jALI/B4DygDIBaMDxgU4BYwDhgSyBjoC5wW5/9H6yPvE/DP6QvRW/T/9L/nR/ukCS/lYAtr6DgHF/9kH9gMKA1YJsgR8BskEhgac+cL9SP0T+lj7yPed9kH7UvYZ++j5BQJMADr/QQPMBJAIrgdbAwAFRhBmAEADg
wWjBMn/Rv7xAQz9zvul/931IfzB/uj12gAz/Tr/d/tg/6X/uPuN+cX/cfxd/kUBOf4KA4/+1gGyB6wAFwQoBEr+nwWe/FwHg/4vBvQGegJcBuIGuAAx/8UAFvgd/9j8g/dQ9V382/gU/HT6CgOk/F8HmwOaArEDIQK2BnMChQmrAQQH6f3/A4v6JP1792X3sPqS8oj77/qS/s/8BQCa/GQE8wGfAUsEywqQBSMFegp7B78GYAJ6BGn+PAIeAJ/8WgBD+wH52/+O/DH/jfku/Wz79fy/+vP7yvyf/kECav3tBDr7QAaeAOz/KvwxASsCqP7kA4IEwP6QBV4GXAA+BcYCXgQK/VQGuP7kAsf9Zf43+aT9x/63+F34Rvw9/F/7+gIq/AADXP3MCMX/oQbYAKMGgATyA2wG+gHaAfv8sgN88Wb8q/kD+Z3ywPv/98r9CPymAGkCUQR5CLUCfAwGBXwLfQMsCbgAlARw9+cD8/+2+oj/4QJUBR/5NgEH+bL+4/iD/hb5Cf8BBPf6afntAMP3zAD4AVr87ACAA3MDqPutBiAEvAMWA5IFKfw/CHoBr/5ZAYACOgRVCFsE/QGcAir6AgP182z+E/Sv+pf8wfqK+gwATf+vAA0A1f+cBzr+iwmS/JkG5Ae1BwEFSwKe+WcEkve8+2T3lvMj/Er4cfuv9jIFS/lqAwAAQAgjAwAHW/+rBbkB2Ab/BDIF7wicAZMKhfqCBUT3X/2o+mf7mfreAvX3ZwLO/pj9pPw5+5MAlfiTA8T2EAWL+m8FJ/2bBTf//AAJAikA1/6cAa8D4f/UBzUAnAvBAJ0NFvvqAzwFsv6L/xUEN/WEAMT90fOz+4j2c/4a9ycGaf3zBCH9DAhz/ZwEN//gAeYIXQOIBFgCVwbh/QP/T/T9/4zzGQD78HP8UPvA9pQAoP7y/+kE2QZiBMUJNQL5DAABcAsLAM0D1AWaAl36CgYs92r8oABI9XwDzPc1A338eP8T+I0BMfkRBRT4BgADAO/5zgO/+1H/xwGKAGj/Cwic9mQP9PWeB1kB3fy4Cb3/TgIPABUE0wIuA/IBLgmB+CcMCfcu+aj8x/hw+O77Y/tC/j4CJftQBH76LgVoApUE7ATHAp4HpwnE/yYFdQGj/8b/k/jB9O/1VvdZ92f4J/0VAO78qAfq/QkCEQX5BSQErwchBFkKKgZwCOUAGwFCBRf8IwNR9YMBsPW8/v326/66+wH7gAEz+3H/dfwPAzr9GP17AGcGePvpBpD8bAHH/FoFk/yCAAADovzpA6MB3wMr/KoHxQJ2A0sAvguE/kgLtvltAxb90ft4/wXzuP4z+Zf83ftNAtH3dAhl9g8MKf6RAyQFdv5tAZoBgwQqB10GvvwgCLDwFwbt7qv5B/iz9WT7Uv49/YkCFgA8BH8M8PwrB08AgwkmAKsHzAKDDxv9ugjR/6f8wQHk9N0B6/ln/OX8v/1j+pAFgfPgCwH4NgDd/qP6VP4q9rP73gHSAWf79Qdc/oILAfvxBEX5swPXApf8r/4CDJ//2QFOCXIASgar/sEFM/w1Ac78+gFb9FwCWPmZ/fL+4vtN+RIAkAB9/iD68AHvCLn5fxAvAnkKNf/8BOf+G/vW+vb+EvK3AEv/9fi4AZD19wCZ8/MCVvvHAdABmAkCAQgKjgVTCSoB4ALXCrf+wwXbAdYA5vrTBZj0Ewfs9S/+kPvA+hb9yfwp+0X9Bv3V/vADePsGDMT1IwrN+1YCUf7L/pr8/ACU+mgAGQUg/dQMUvfWCjMEYgJBAJEIcAH0CQH8Kgey/H36HgF+9X8B//YW/0b6Iv569XkGQ/ZABi7+4QHoCScD1gLPB1gDrwDMAH79pwTg88AFE/WqAdr1+/0o/Wf7ofiv/msBhADxBgz9mQcAAmAEOgGNCTsC9gc1AswLOQFaAM4CpPiP/HH7GvlI/n78/fsuAJL8OQf6+CoCzv1EAsz9j/0W/HX7yP3s+iwBiP2bA24B8ASOAdUBQ
v4CCeD9qgFuA/EGsALQAh8J9AQ5BZr8IwEt/CQBDPu6/rb3EQDZ+Hj/y/kp/b75cQEM/q39EwMB/hIIiPwYC8L8gAh+AagHN/0TA+j3Jfxe89/1GfvG+TH+KvkTBaT8rQzp+owF4v0hCAEDrwWAB3wIj/8bBdr8mwNwBfP4ngYk+Y4HOPT7BEj5o/4S+vb8u/0P/6f/dPmL/+77HgDy9y4C9v0gAlb4GQ7T9WkIEAbcA0IApPwYBaD5BAHu++kELQLQDYH6txIS/bwEsAES/d35Iv5o+Ab7qP5f958AOfaCCzP4IQph/PADtQCzBGT8tQcKA9UA/go4/vMEzPkrAary/vzu9R3/yPTNAov0l/5bA4H9dgSu/AgOyP+kB3UB/wTCAR4JmvptC4kBiAsf/zj6yAFu8/L/6fKV+oD87QIl+3gEMPrQB2z1SATz+Y78/AV2+VMBC//3/hoCAgULAdUIBv0HCPTwzQd7+F0Ba/9CBLcGTgNmCngBVAajACwBRfXkCAr9L//F+mABg/l6Arv6QvvK/M7/L/tT+0EEevwVDCQErgjtALUIIwCd/y/4jQH59wz56/629y3//fxV/Xz83/o8/R8GZ/rZCMf8lgn5CNkGtgjXArIGzgW//aYBsAHk+dwE6PmC+x4BzPyT/08DzvEQB5n2bgFp/nXzaAKD/PgC+v9Q/XQCrwMb/8sEiPf3BGv8gfx5/R//yPpfDH0GJv6GCL3+hgrt+NEAQgGL/GEI6f9V+L0L3vvN/zEBPvt4Afv1qP2N95H7Nv+SBSECugtSAnEFQAei/OUBgPq2AEIDvPxS++z7X/pw/hP08P9Y+o37kQBD+zgCAP/R/zMNSgX5BUIMugL4BVj+IAFI/40Ep/90/EsAhP/p/Uj9+//m+08BqP8o/wj96Pz3+6f+1vzD+9/9XAO+ABH5SQQ+/g8GIgLq/iUDWv/CAKX+NQA//ToBkgULBAsDBwgEBkD+JwJS/Xb/vgCN+Tj8k//8/tYBy/ej/aADJvW2BWz9MwSaA+sDu/60AvwCOv2dBaX+uwSl+0j/s/vi+R/++vhi+Av+SPiwBbH3wQBDBMj76QjX/QsHwASXBLAIGQLn/ZYGWv76ArYCxwLLAmT+pwOhBPn6B/wGAHbtX/7/92787v7gAMsEdv2RApAGtP7c/moD7/iEB6L5/v7I/VH6hQPYAKQBqAU9A/n/5v24AN0Azv2HApQI6QcBBjcHcvoKAPD7A/sm+d38FPt0APsBBP45ABwGV/rf/ogIGfwkDvv8Uf+sAZX/cwRg/ET5NgEW+VsARPzN9YX/IfiX/iT4tQYz+RgFp/mFB3QCYwiDCMoDvBGZ+1ULiP5M/2L6RwSr9QwIlfZpBXf6XvsDAIr4Q/6SAMwA0vqACwLw4gUY+m0FI/cqBW8Efv5vADYGoPcGBu380/4+BH32GQ4B+RUB+f5TBIf4dxCB+s0KSAJtAIkEhAID/4QDewA4/qIBt+35CUv1ugIR9lgHDvzyBEz/eQAWA1ACCQTp/i4KOPtJAsv9Bv/k8YAAz/TC/zzyrgCh+g4AcgUw/rP93wnCAfv+fwnV924NcfyYDsr+TQ/MA1UADAAgAHX1oQJj+HX7wP8F9dgL9PdMA436ZgBm+aMFl++/CDr2UgGCBTT+dAViAqoEKfzEAiTzswd49QcGSPczCAEFkQKH/x8EigDABMgE5P6/AnP/jAc88bsIg/cKA038lQI4/PIDlvnPAib//flABtr+GAsq/BAFNgIDAU769QcN7hQJlPqn/kL/k/cy+BX8Pv7U+Wb/Cv+bC6z0ngwN/EwGkAkmBgT8tQ/P+gwJBv0M/z8E5veNEODzDAYl++oCHPt8AkT0v/8O+TADpPht/WUBSPorA178Nv7J/egHg/JJD/70DwiOAZ4F2gAZ+s8C2gFQ/QUBrP6DAB8L7fm/CVH52wuW+fEJKfsBAkr5UQRJ/Br80QVn96AOPvO2AZr79gIJ/O8DKfmNDP//Z
wh3AYz7AQJk++wBp/Sa//zxbwXd7wsBrPhMBdr/f/73A5z77Ait/v0EV/8VCuwAvxNB/uQItwOGAdj4gQB0+T72vgm09VQJAvKrCNj8Of3B+fEEBPk1AyT+EwCy/fD2pAd5+nME5PoTCFf71gpS7pcIk/QZB/T6oPtqDCr5kgv+Ah8HhvyYCJz6OQr38BoHTf9qARwGav13/kcGXfvu/XH52PhGBZjyQwyH+RoFUAQWBhH+IQmw9TQJbfuQ+NQCqvPWA+zyLAAo+hn9SwCTBrnvqAlK/h0B7wBY/y8CWQpyCmAFdwXW/7AHtfYAChX2OgAtBbIALfr5BtL9hf8/900F6PvB/B8Fuvg3AIX54wHV+IsDNPw1A938LQV+ACEBqPmDCKj0rgI2BI/6eQRSAV8ItPWYBWj+MwaA++YF4QN4COQAJgP5+uIAO/1A+7f/FPm8A4/9bQru9ykJA/jpBrr7j/+YBAMBmQRR9ggIMfgo/ssAQ/WI+3ABUfXsCp3sOQh4/Kn4Lgaj/1AEfv6YCpMCkAQQ/XcU/PoNCk37cgRvBrP+ZvlV/FQEGvTyAVn8iwGc9xcC6/56Arf7egWr/ef+Bf2r++QASP5wA6j/ZARLA/r84PjdCIPzxAKQ/T79gwDpAdwGH/iMA7IFzPzXBYEIT/9QCHr7wAS+/K/+T//B/PgCX/18AtgHX/kT/F4FV/nIBsQAKPdOCA//2vhJBs3+lQNC+WcFiwH08mH/7fcg8az3zP3e/PcFdAUCCkEFVAOzAAX9XQeuAsf/YApyBy8Dngl198MDe/cQ+Z8A9Piy/Q8AIQWa+UUB7v/fAcf5xghO+OX7HAB8A+j3ff7ZA3wDBwWDAZcHCvFaCWTyVPpCAekDQ/rWBhgAbv/OAV3/owhS+SoHMwMHA6IAaQCC+3gGjPz9Ba0A4/fCBan8uPYfAzv8xQsQAZn8GQpW99UOjvlD+RsDIft3+kv8Q/q4//b2sf6VADj83gVg/VQCMv1mA8n8uwQ3/f0OMQE0AvEKS/1aBDsAUgGFAB4FLf2tAEv1ywUU/Sj7of1XAzT9Zf5L/WP3kvz3/7sCt/hwCov/gP1VA4j5eQDoAMT/fAYC99gL5/sd/hD8TfzUABH/PgcSABkILwCNAbv7ZAdW/nwBbAEaBdgAdQPw+FX/yf1Q+agG7/sKCvAAvQIg/BH/4QG7/rn6BAnY+7kBLAUr9Gf62fmR+nj6FQIi/+ECnfXIB4z20PwxB2L+KAW4BMQEKQY1CMEABAYK+u4LL/+f/9kBpvaQBRn98PYXBBH54wRK/qP1GQd3+ur96AB7AEX3LgcY/a39SAOa//4Aiv1cACH/O/tZA0oCfvokBiP4/Ahw/WT/rgGA/nAF+gYQ/wMAPQPQ/0YFMPdZB6b8U/8K/JP6x/7YBhr57gaL/6EDdwRf+z8GEvncCP358AT1+0kBk/wa/n/5pfqF/HD25wPH/db5xwYJ9+3/HwTW/HII5v2pDuz+bQnzBQYC6ASLA7f76gSGA4799v22+ygCv/ex/378nABy/RQDbvo3A2f6dvxNAbv7NwPy+xgEQ/+i/h4AqgCT/rQBHPKsAbD+MPyBBBsClwGaAQUBpwFQBcH5CQZs+o4JwwO4AK4KZ/ujAEz8C/fiAgn9WgGxAZEDpv1cBF4BqP6wAmP9Mgyq+MsHSvjQ/nT6q/rZ+RD8vfzg/Yr2rPqyA071FwfQ/oIGFwMLBf8AcglY+GsMS/+S/wQF+/6Y/YQDOQEpAYYA3/0G/xj4Jwgk7U0OtfpgCyX+gAAjBYwA5/d+/hcAG/eKC6f44wHw/iAGp/MtACoCkAIW+p8BH/X9/ez82QAvAEP9dAsXBIcIKwWDAzr7igg5+bUPTPVCBDoASvpEA3D+xPvvA7AFtfscCan3wg8Q6ygKt/g3/D4A1/cM9tYATP0I+5sGUOzlEOvzsgQj/5D8xvYPDXz7cwYo/gsKqgqL9H8MXfibByH9WwZK9xYNq//nAgX46
AXD/Oz2HQYc/Mr+BwVL/yEFAAk59IUO5feeCZ/3CAMGALX6EvpR/xv5l/nTAIj0VQTq+k8AMf69AYL0LArD80kLk/5mAS8JIPzSDmL8awffANIDDwA7BeD6LAjd/04EfgD5/t8C4f4Q/M0CyP1f/UQMXfRAA9X1PwVF9D3+1/2x+7v9Afyo/pn4ZAT2+P8EeP5gArn+qQjP/H3+zAAhD8r13Qo3Al369AiM9wwA+f23BbT2Qwnf+pQJtPe9B6L9FP7g/1AFv/4OBkwBRfnOCHP4Kgsx8NwGVvWPBPT1RwCT/zsFxvk9/VH+2vWI/8b1IAAM+asGOAKbDSz/PA4w92YTP/m0/1YBVgBSCMP4NgViBCYB1wMnAyT4hBHZ718I9/hXAkX9/AGt+aIGWvzk/TD9NPJxBKTu8Pvm+JMBJ/EfDqT0ZxBZ+lQGwAf+ALEGJ/49AiADigRD+bAM1PSREXfu6wtC81L8wvxqAD38Xv88BIwDow9W+wER7PPgBvr8TQA0918HYvakBJX0BwZ+/mf85AMk9FAAp/3//1Hx0AXf8NUI5PQUDxb+FASwCVYDKv1XDLUAdvzkDN/2agwG+SAIJf31Aa39ZwbK9YIPcvNVBvkB9fziCbv8/Qiq+FEDafMKBvnpcQNh9Hz8Yvfw/QwEDP5/+/QE0P4xAGoGsfcYDvn09RCM+SoIJwXKBaj+pAJU/Jn5bP4N+VMB8ffYBqX/OQSuBM0JyPMTEE33mgE+Avf6rwtS9jYG6/5R/J4JxfkD+4IB5/Sq+0r3g/5t9aQDhPTWBvf4af2BBTH85gTq/G4JkgXbA8gJYAOA/P4NKvvzCEf2cA7EAXX+GwUB+4ECf/7V/0j9Zghk8z4FnPYZAMz9rPQ7B8v7PvxrAaH01Qg591/4+QK59wkEhwD6AKMCRwph/ZEFUP2UBZoCgPwJB3j+yP4EBRb9zgAm9b0BygIY++sKu/sXBkMC5wPv/64D4wPKAjf+d/uFAQ/69QB4/wP3Awgf9gIHjQC88PsMK/JGARD3//h6AkH0EwKm/04CtATLAPQERP88BHoE/AIJAz8B+QjCAKAJjABF/yoEa/78Aer9bAWv+XADUgPW+g4HdfxD/5j7f/dv/jf2Kf8x9MD87PqX+X8DdfoqBnX7LglBAcQBLggFAy8A0gT+AIEFzgD3AhABwfitBJ700QA9/HAB5fnbCeAA9QHQBiAAcAaH+6UFIfo+Bsz8KARr/+UCDQSh+Zj+4/gZ/CH9+/hm+YQA8/sd/nT5NgRz9ogDov0v/6wJu/nX/T8FSQaR/sAFbgZhBQb7uwrH+B3+mwsd+eIBEBLd+toGUgJb/yME/PVlDa/w0gFwBgD0BABAAEnxR/vX/Sr8mfomAI/+DgGf+9j+WQAF/ssGg/ogCZr+TwVaAl8DSwS2Cdr9iAVS+7z8Rfuh9Uf+pvqPCBIEngS7/94K7PWmCND5IgLeAO/7agcB/1ACiAUU/LsEw/xY+qEDLvJTA9ztsQQD9iD+2/uzBMMAB/6f+cn8GAZX+lsDiv6rD/wBRBAoBVEH+gMr/9L6gQdQ/ScCT/n0/88GOfvvCGT8lAei+w8EmP8n+1X2z/iM9Uz7KPcHAYP7Rv/8AhP44wQc+mAFsPpOAgMD0QQXBKUKbgGQAxYHFv6O/cL6LQAs9OoDUwLyAd79IwfN+18Gsv/EAIcD5QKuBPD6uQJx/+8AQvckCQr7HgWo+0L/E/t+/Qz6rQFA9aj/lAHo/R8GBu/+CDvz9gLG/eIA0/ujDX/7sgTN/1UE/wPs+O4Nkf2BBf8EoAeu+oMNff0JDi/6jQ2v/Ez7UgPk9SAAffiWAc34UwAv+OMAh/EFA5X2FgGe8LoIZ/jPAZgBEfkwD+7yoxEf+ggGXwPIBzP2+wqw+gkHTfrFBOoCTfSCC2H8S/1a/KIJEftJC6jzWBIa8WcMMgA3+IYJHv1bBJj4dgHO/YX76P8CBvTukAggAE8A//NbBCzvd/03/nf03
gPg+9MLo/ylAW4MSwL//9gDvv/YBc37AgaoAGcBAgdJB9wAUgj6/KgA5QHa/VwK+fRnA4ECuvj8Adb3Y/gx+ZX2dgMx9EL/aAKf9pQCJP/P+5gI3f8uBHoGwvz3CiL+8wETBUH4fwSzBmH3yASv+mz+dwLl+F4AwALqAkT+Uwfg+6gEFgFV/+YGIvyUDXj4mQB5ByjxBglW9RP9yv1J9qAHUPqp/moB2fY4AGX7T/k8AfL7aABH/ZsG5AXa/0L/yQFMBNkALQIjBWMDEQIyBPUJYwPq+0UEVgJQ//4MB/s1BHAD4vjHBHDwaQu/82b5yQMy8FIGUPdL+K/4fv9hBFX6KfmBAmX6K/+uBEX9JwUaBoUErwYuBVH+nwnD+goAkPl4AR7+1/+U/zf1jg79/5cLY/j5BSoFrfuNBIT8gwobAWwAoQTr+LgFs/1Q7/sCcPj0AJ72QQC9/Qr4NgOE+NYFt/hRABEDm/xzAH/9DQLyAyT/PwQVAiP/wwUdAtb/DAFnATn/owdP/1UJe/pSDFEGUPt/Ezz4GAVj/BX7nP+P9W0CB/ti8ogAp/OF/yr3e/hJ/5z4egiV/uQEswHyAaEEmPpMCMEDwQKBAlL/Ygc78fwCQwVN9sr+KAXbAMgEyPwOAuQB5fnZDOv3mQrbCSP7yAlg94MHpvVc+LIIr/izAcb+pgKMAY75vfz3/Lf30foJ/bj5k/vBBcvytARRBOcE4AMVAFoJzfnMAVYADwC7ABsDtgRCCB4CgwtoAmn/cAX+/0AAMgWiAdP9JQIF+oEDHPYZAaD6x/VCAWvwUQG88hcAIgFp+8oC5v63BKD9pgKI/a8ErvxHBx4DgwGnBF4EN/w3BQH8ff86ASj8qQdr9JgLz/mwCI74EgOvAtn+cwaw/o4GK/1eAZEJ8/diAAYDjfX4CInqdBIE87YBjwFu9A0H2/Y1/3b3M/nF+SgAH/nQCt787Qa3/68E7wQG/sQA8gSv/dYBsgsQ+ogLtABkBxwDSAcLBXADVP4LBDcA6vPZBQr1NwLV+zn8IwKt9Kz88PzO7QsHa/wz/r0HqPi/Cn35yASb/7MBuv0cDPsCYQMiAD75GwVk830GfflZ/3MI3wFH+MkH0/xpBT37ZQadBgv8DAlO+7gDCPyTBrr3awvc+AMDDP+n+gcF0/fj/Mn7cwFM//787fTeA0/z3wLn9HX/uQSb/dwDcf1QAMsEDAKL/oAJO/vBB9cFuf5D/1EDZAEBBs7+qQof/hgNAwO4/dcDm/zUBw/4Gv+m9nX9wvbl9RT22//D/HwCPfnF/7/7oQJXA6D5ywdRAUIHMgA+Ayf9FwQBBi39M/6YAxX97ACJ/Zb73QAsAaMF2v/8AnADgwMpAj//SvyNB2UBl/tMBGT8ggVD+4MHQPzC/2gDCv1p+ov9Zv9x85cF/PJt+p4BCP1n/eb8x/ypCiXzgAqT/xX7jAhq+tYFN/tACMAA3QL8BDAK+P6LBuIE6ATBBL8DegTMBOT6WQbx/ED1UQS07z3/cvdE/Ib76fppAfj4jfdMSVNUYgAAAElORk9JTkFNEAAAAEltcGFjdCBNb2RlcmF0bwBJUFJEFgAAAFlvdVR1YmUgQXVkaW8gTGlicmFyeQBJQVJUDgAAAEtldmluIE1hY0xlb2QASUdOUgoAAABDaW5lbWF0aWMAaWQzIHAAAABJRDMDAAAAAABmVElUMgAAABAAAABJbXBhY3QgTW9kZXJhdG9UQUxCAAAAFgAAAFlvdVR1YmUgQXVkaW8gTGlicmFyeVRQRTEAAAAOAAAAS2V2aW4gTWFjTGVvZFRDT04AAAAKAAAAQ2luZW1hdGlj", +} + +BASE64_AUDIO_DUPLICATE = { + "name": "test/test_files/audio_sample.wav", + "data": 
"data:audio/wav;base64,UklGRuI/AABXQVZFZm10IBAAAAABAAEAQB8AAIA+AAACABAAZGF0Ydw+AACO/w//5P6R/9D/SgDJAGIAegA3ALkAPAC8/zEA4/+G/8X/3//f/+n/jv+d/87/mP+p/7v/jv/C/ygAogB+AOQAHADX/1EAQwCz//T/kv/B/oD/rf8VABUAKAA3ANv/4P/o/8T/5/8o/6P/dgDDADcBUwCu/w3/+f5Z/5L/YQCfAMsAaAGxAXgAg//m/lT+Rf6k/lQA8wAXAR0BtwD1AF4Amf8g/xX/Tf/8/rb/FQDc/6sA6wAJAeIABQEyADn/af7D/b7+Mv8nALwAdAFAAooBswAKAEz/4v66/nb/KAAlAEoAQwBIAM//qf85AGAAeP+z/5f/n/8rAOL/MwBkAMsACwHxANUAjP8B/w7/2/7X/vj+TgDp/0MA5wDRAOMA5v+Q/+n/1/+C/zL/qf/y/yMAhQBEAEAAyf9A/23/JQCZ/5EArgDkAGMAmP/o/9b+Hv9O/8f/mQCdAIwAYwDX/3T/5v7//8r/PQCNAMIAvADq/4//SP8yAMP/1v/t/67/AgBaADwAAQD+/4YAZQDmAHAAgf+S/0D/D/94/7oA1QDaAMoAQgEFAX0A+v+S/i3+lP4o/ycACQBlAMQALAHxAJb/ZQBV/4T/z/8HAMUADgEuASQANwCCAD8A2/9e/wz/O/8u//T/+////ysATABVACABbQAwAMX/tf44/93+vf8IAHEAJAGnATYBoQCn/3j/VP65/vz///83AE8AeQDD//X/b/9RAMz/vwBmANP/dQAaAKT/vP/X/57/xP9B/1H/Bv+nAPgALwF3AY8BFQDe/9f+tv73/qT+hgBPAPcAOgAoAC8Akv/C/3YAaP/3/1//d/+6/6b/TQCAAPMAtgC5AN7/dv/s/fj+Ov/6/+8AfAGQAagB1gBV//3+kf7R/oH+jv/H/3AAdgCYABAAowDK/97/uwAEAJEA3v8SAJ3/b/8vAO3/8f+QAFT/OgCCAEkAKwAFAKL/Qv/S/4//yP/s/2wAPQB3AF4AlAAXAAsAZP+a//b/rv8ZAOb/EgCt//z/sQAlAC0AJwHs/1D/G/68/k3/z/+TAfgAewE7AvwA8v+Y/nn+7P7E/YMAmwDQAIABYwBxAEYAHwBrAIP/Rv9m/9f+GwBH/7j/0wCVAfgBCAHJ/8f/s/7+/rb/BP+v/zMAzgDa/+T/twAfAKD+7f91/+f/sQDq/6H/AACZANAAfgD1/+n/aP6h/9X+uP4CAHkAqAGBAT8BkgHZ/33/Df9j/jD/PP/HAI4AIwChAKsApv+3/yD/kv/+/x8A+/8v/xsASgBbAIcAdADy/4YAaP/w/8v/T//U/zkA2P+dADQBdAAqAP3+bP/P//r/i/+M/in/bQAaAEQBhwDsAJcAXf+o/+T+TP/A/1cANgCIAI0AJQHK/53/AwCqAEQBWAD6/8X/dv/L/83/q/9rAFsA/ABPAMf/xf5K/+7+Sf9nAPwAjAGYAA8Ar/+b/5L/kf8m/z8Ad/83AVgA2P/cAJn/VwDG/6P/gP8Z/z7/XP/P/oUA7P9XAK4AKwCNAKn/Iv9YAAUA3P8DACoAPgC8/moAFgA1ANEA9P/r/7IAxP/c/kD/vv9cAEoArAFmAVEAagBJABj/yf+X/z8AGABY/2kA2f85AC4APP+c/+f/yf8T/+r+bgCu/x8AJgKUAbMBTAI6AGv/TP7//X7+vv7sAL//bAEnAoYATgCt/+n/Uv9w/tP+j/6i/0YAUAA8AXgBIQJEAfL/Cf6a/if/iP9bADsBugLiAiMBVv/e/r3+EP7s/Xr/qP9z/4AAQwCk/7MAlwDoAOgA6f+A/+n+D/9E/if/BwHTABIC2gGEADMAUf9P/3D+lv7F/sv/6QBPACQAWwDgANn/2f8I/z7/7P96/lr+vABgAWYBEgJaAT8Asf/N/3n+FP6N/kP/mADsARIB7AC4AIX/k
v54/v3/BQDf/0sAKQCqAGEATP8jAMr/7ADtALL/9f6k/pT+vv7t/84AyAG7AQECJwDG/7n+d/2X/uD/6QBKAZ8BOgGbAAwACv/f/goAsP+d/2z/QQFJAML/uP/Z/xABmf8LAE8AEgCM/wn/c/99/04AgQHG/5IBOwFrAGABOAC+/+/+5v6W/j/+qf/mAGX/9AC/AHb/i/8g/6z/n//J/2wAiABZAZABiADBAMP//f8PAE4AEgAvAPH+jv7A/+n/OgDk/4wAKAAVAJUAj/99/tP+Mf4AAMgBGAFZAZUBhwCh/2b/Y/+C/2f/6v8X/3n/+v7A/mkAr/8ZAF8B/wDBAPH/8P/o/9j/TACr/wwAZgC8////3f+4/mz/XgCF/9D/XwA2/6v/pv/3/1YA1QDmAFQAnABDALX/NQDx/zEAewFfALsAVwCH/77/7/5m/9D/Qv/k/4n/7v7S/n79tv/DACEALAHaAacBugDfAJIA7v+x/+X/EP+d/+j/2P8LAMH/Iv8PABcAlP/I//D+VwDS/mT/jwB4APUAwAC5AD0BAP+PAGsAIP8gAaT/sAAqAL8A9AAG//n/SABU/nX/uv/p/37/gP85AMX/aQBMAMn/Mf9vAOb//QBHAPn/hgDi/ykAGv9h/kAAqwCU/wAAZQBgART/i/+F/5D+YP9wABoAUABNAe8AcwCbAK4A8f+oALYAkP89/8f/7f7+/8b+Tf+yAPX/CAEHAaz/ywAbAXv/Kf/R/5EA2f9uAQAANf+5AKkAZf9T/xABLwB0/yoAIgAKACsAGP+B/93/mf+6/+r/bP9s/in/fwB5APAAKgEvAdIBTgBsAFMAMf+3/s/+GAAWAL0AQAEFAH3/cf8aAMj/tP9+/+D+lwDsANP/mP+DALH/pf+MALQAwgDlAAwAbf/5/00A5/99/1AAZv9q/8H/0P6+/vj+4/9hAdb/xwDQAIX/zP7e/uD/I/+T/0QBOQCtAE8B3v6DANb/Dv9T/1YA2P9p/4QAngF0AfcARwBD/9wAGP8u/yv/z/7T//b/yf9vAKIBlAALAHEB3v+8/s7/H/70/LD+FAGGALcBZwIeAbkA2gBB/2H+0P5V/93/ZwC2AVL/uP+o/yj/r/+6/p//hf/K/qYBKwIoAUIA8wD8/zD/ggDC/tr+2v7d/9r/RQE5AgEA7f+TAcn/Xv8AAB0AlP65/hUB5v8nAU4CBwAI/xgAU/5i/oz+6v6u/7sBCgKuAQ0BkAD1/rT/R/8+/mkA0f1n/4cA9gDLAKgB3gBg/1cA6wCX/lT+AQAG/m7/FgGo/xAAeAExALcAbf+//x7/Uf8pANf/QgCbABcB8QCyABD/rQDQ/gH/9f9F/mcAbQC4/14AtQA1AW7/LP+OAGT+9gDsAEb/BwEbAMoABAHS//z/g/9i//T+qv0AAOv/b/+QAKj/2gDKAScAdQHl/0YAEQDn/+kAzf6xAEgANwAGAGYAOf+D/zUAdP6R/6r/W/8oALz/UQErAKEAGQHv/jQAQf/B/2X/CAA6ALcAjAGAAHD/NwGsAHQAAP++/r//Yv6J/+j+zv9T/0YARgFHARgA7wAdAIT/RwCe/yEAQgAuALT/FwCYARMAV/9pATf/XwD+//f/F//V/yb/fv8FAPf/dQCP/xsAMv/mAOH/lAA5AXT/Vv4/Avb/n/8mAcEAhP9i/+3/4P24/8H/JP+g/iQCZf/wAD4B1P88AJgAXQDY/oj/QQCQANn+UwCd/5gB//9o/w8Apv8n/4X/t//j/4sA1P+oAMf/UQFv/zn/sgAtAFMAogDm/4oAkADBALD+5P4qAWz+bwCI//P/0/5n/1v/R/7R/5gAqQCvAGsBpQDyAAP/JQDr/9H/4P/8AB0A2ACBAGz/xv7U//H/cv/PATD/6/5p/44Aef+c/0gAhQBOALYAif/O/0YB3QD7/4IBggBKANcAhP5CAF79qf9H/4n/yQKd/2sAMQC2/uf/y/79/yAAh/+oAF8B5QCG/5L/b
/8YAB7/pgEV/xn/3gD9/sf/TP+M/0oB0AAUACX/Af97AQL/Sv/F/3UAqwDbACMAWQEGAPP/LgGe/3MAcf+7/ZP9X/7t/f7+0v6lAiQBhwI1Az4A0v4//3v/Vv97ABQAKwFw/+8B+f5m/y3/Vv6vALwAHwG6/qb9VP8y/lj+WwBOAWcDfAGiAAsAFf8+/SL/of7l/5UC0gLHATwBYQCU/oT/GP67/sr/SwLI/3D+GAA1/13/uv81/iYBygHA/+L/tf/IAFD/EwHVALEA6wDbAM//fwAdAJr/3P86APf/DQEvAZn/NgBv/sH/Bf4YADL/d/7BAOD+3v95AmABEQAOAIf/5f+0/SUARwKy/zMBrgGz/1QBW/5g/6L/Gf9wAEr+GwEeAP79af9v/9D+4wAI/yEBwwAb/7MAC/8pAEUChwDwACQBnP8oAKH9mf/k/uL/MQFsAN0AQADV/yT/7P27//f+pf9NAPYA/QBcANgBgf7jAaf+7v+V/4v+cwBo/nMApAJtAV0AMf+zACQAAP4tAFT/oQCX/8MBLQEpAboAhv8Z/oj/H/+6/9n/mP8MAcL/PAIeAQQBMgHIAOP8xv5c/lf+dv36ASQCQQE0BJUANAH8/zEABP3t/yP/Tv9NANYA5v4CAEcAuP8EAQMAx/36/BwAwvwfAC8BOgOmAF8CCQGvAJ0A0/1J/Pv9mgCN/8cCHQHNAWMAKwH7/Yv/mv3W/nz8K/4QACIAUgKNAI8B6QE3A4r/JgD8/Ef/Gf2AAVsA2v6lAT4CDQHY/xwALv8s/uP85v/K/OUB1QCMAHoA1AOlAqX/uP+h/cP92v2a/qgA8P+PAZwEvv6QAsr9r/4d/lL+OACL/jEB2AESALH/3gIEACsBnwCbAf7+5/6q/u/+/v0VARcCNAEYApT/1gCu/Z7+CP7U/c7/bQH0/zwCFQH9AKYAh//YAPD+nf+3AO3/aP90AQAAwwJG/6QBz/9N/OT/Gv3a/HH/pv6jAOwBkwEtA37/YgF+/gz+hQBaALAABwME/58AVQGT/kQA5P2s//z+yf+UAIH/hgBKAFX+FALh/3UAK/+O//v8cP4WAkAAkQIyAQsDbwFMAhv/c/2J/Vr+qv2BAWUAJQAyAOL/WwDL/OUBGP50/r8AzwCOAPsDDgIXAX7/WwBt/7j7X/+b/Ab/pf/pACgB5AL4AL3/KwCJACoAwP5v/8n/YABF/rQAn/8iAgYAAQKZAFj+6QCI/q/85P8jAQcB4QDTANoCr/3F/7b8r/wv/8P/kADhAa0CTAKlAGsBvwHk/TP/6/83/sj+Cv+X/9oB5P+GAgEACP+5AEP9uPvy/p//lQF8AfoCjgNP/woCov4F/ff9R/+8/rcA2AAFA9cAKwDIAP39zgD//q/+l/26/2L+wQAkAX0DAwIGABID0/6r/QL+m/19/z//wP+UBIX+xQHv/qz/1ADT/jMCB/9VAKsAz/43/xYCu/7AAN//lgCY/u7+ov36/NYAtgKeAekArwSP/3j/zP65/hb+Zv+S//P/6v9iArkAhf5xAIz/NgH1AAYA9v7W/zL/GADn/sYDZf8tAXoCnf3+/5b95P6A/xL+rQDnAQQDrgHy/qgB6P0W/5T+ov5z/4ECAQGeAKABawG7/zz/IAE1/Yj/AQEq/vX/NQFh/5gBIQD7ATb8lQCnAHL80//UANcAbAAEAkIA1v9j/wD/M/4iAZv+agF6ACsA0P9dAdUABQAEAZr/CwI4/hb9q/qT/zz+xf8UArUElQCZAO8CA/7K/+z9RP+k/r8CsgE9ANn/HwJr/ff+1P70AUf/Jv0CAaf8+AIa/9AAUgCjALr/IAAP/zICav9t/20AiP9qAWb+2AFT/Rz+vgDiAY/7fgA3Adz+9QDsAJ4C9v/uAUUAeP8gAKb9Hfw3/wT/QwEqAVoBiQGlAO0AwQBk/s7+Uf8P/noBnv8jAwMBB/4aAYv9N//JACn9zwL8/kcB9wJo/5EC6/4w/joBWQDFAAUAV
vy6AKz9Xv5K/8D+YAICArH/AgRj/db/GP7//ZQC8P3YBZ8A7/+jALP/t/27/gL9vAAJAKQCAQEC/sQASv9R/vX+OAEA/3wDhP4mAgX9XwJw/6/+YQDW/gADK/4cAST+hP+6/UUDZgBr/z8AfQJC//MA7/8u/xH+P/76ATr8tgKG/tEAWgDOAu//m/9CAYv/5vzGAdcCMf8v/2wASwF//c4Ahvx0AFv9agLmACsAwAFEAjUA//6EAJD/PAAnARcCq/wTABIAA/1C/BsBnP10AlICegPz/wIAPAL4/N3/MQB2/REB5QFV/70A5PxpAwX+8/65ADgC8f4VAEX/xQF1AVn+6AEf/XwBxv5mAH4AE//k/YwC3P6eAG/9iP8XAwz/fgCvAvkBWABKAbP7AQGv+zoCWv9x/ywDa/2FACMB2PzzADUBAABmApn9HgNv/Jn+RAA+/bf/hQPk/jwDjAFE/0oBRPy1Af36b//AAggBeQAyAd7+6wFk/g7+ov8H/1sBZv5+AFoATwE8/m0CJf2VAen/jf87Auz8sP+U/6AA+v+bADQD9v/+/tcCgv1L/pL+Xf+X/WQBdf8FACMBMAGH/wD/qAIG/1H+7P+yARoBrwEW/xACMP8eASL+Ff7W/IX9UQHF/xwDkwNgAbEAuACn/cL+CABXAX/87ACUAesBxf5MAX//aP2ZAcf/6/9G/jkC/vwsAF0AswGK/00D4QBK/RAC+/2L/o398v6lAnsC7v/HAwf/RwGL/C4Be/5c/L4Asv/cAXYBvAA5/h8CY/4oAXH9XAHE/iL/YwAtAZL+2gJrAcT+VQMg/zYC/P04/+38ev9p/jX+mP2JA0ABXgBwAYf/CP8WAA3/3P8xANH/OgKc/Q4EcP7Z/pX/Ff/Q/d4Aov8WAZj/L/2wAQT/jwGD/x0BvgGH/1kANQJO/pv/i/0c/vcA+/6YAfsCJQGWAcT/JP8RAWf6RwAj/4f9YQJA/yYBkwAg/6sDjwDAANAAkfyfBKf9NP5CAeP9lv81AOb/PQI8/6z+DgCk/hgCWf5ZAG4BaADMAEgAP/7/AZb8qv83APT+tANT/6cBAQGT/1wAwwHl/AYAkwI3AL39pv2v/jX9Pf9i/6cBpwWCAw0DAQXDAKsBgP9T/UkCjP6b/hP+mf5A/0z5ifxmAEj7z/hr/mX5of6fBODxZwTiC/n7KgmSBAAKDQhb+3sKrgdg/Y4CiwEp/mz9oPzB+P/88ve/9OX9yvqZ+xH+Nv4GASgATQA0A0gC7QPoAVUEkgMWBK0BlwR/Az4CTwTAAdMARf+kBBr9KgDW/6QCoP/DANH/Yf5yAKb4e/zI+Vb4Dvvm+vz2cAOV/Cj7VQaJ/JQHgAgB+ikO5QUC/GgMxQOWBq8Fsfy/Clv/ge7vAhn5XfWI9FHxqQOC+GrxRgAOBFj+SgDCC84MkQhUCJEIOxAICGoBIAoeBjD/Iv+v/J39Evho9gL5rPVw/M33svZe+s36Zvqb+az+uPy7/k8AsgCQ/rgD8wNvAQcHagWmCOYEIATIBkEAcQK/AqkEvgGSA3QFLAEWAyL+oQC6+Xb9qP/D+Ir4Gf+/+Qn2lgBt+vD9PQC7/lEFEAR0//kI9QZyBogDwAPPCp8BgPVHAPMDlvIA9FP4Svy/9Ez0I/3r+2j7ePqBAFEEiQJ4BgoIkAyLC04Nqwz/Cw0JoQEqBfgBagAZ+1z9Hf0d+KD6Qvs19nv59vrk+B/6Wfrt/Bz4HP0d/b7/8ALY/jUDKASfA6kE2ADzA3ECNgE4B0gD1ASMBUIBNwLcB7r/kwFgBIL/oP/p/MT5oP7t+ivxu/2m/tf6BvqT/boDvv6i+gAJ0wfZAtMABQd5CjsD3v8YApsJkfqR/bj8KP8I9hbySvkW+v74s/Lx/Mf5UvvN/ywENAU1CVQJagoUEO0Lsgb3ByoI6QRmA/4CAgDT+jL8kfi5+lL3xft1+sb4QfsI+wH80/nM+2/9bf4y/BMErv2j/CwDsgMs/nAHy
wObAeQGJgLpBncBngMvB0ADRP+PBvgB5gAU/Wf+PgSBAhH6bfsWA074Avas+WH/rfki9o79xQTh/tT8/gS/COMDLQZMCe4JTgRM/s8Cx/4t/hH7yfs6/uv4mfWH9zv1V/Zp88/4kv7f/xoIugWpCX8LUQpHDVULDQnIClAFjwPBAiACKv8r/pX7N/+J/Zn2y/098wf1bPpn+DT6Mvtk/fX+//+i/WX/1ALO/fcBNQTT/5kDrQWKA5MCVgSnBnwFqPvDBMcGYAEa/7EEOAax/4T8hgDbA2z61PnQ+xwBtPeT9rH62v/5+BT5ggIGBR4EpgFgB8wGmwWMAwcGUAIFBXr/4QKs/V38n/ta94X2SPYR9+f1kvtb9Zj95/3QAK4CSQZNCLwLbQdJEugM+wPxDXgElgLKACYCVPxW/Sv6ZP1s+V35+/rz+Ln2lP2E/BL39/4y/AX+V/1WAisBEwHn+9D+QwXkAWz/2wTlB/sB+/7OBp0KowAHAPsFGgkvAJb9EAHlAWL7Y/o9AcoDBP9N+xz77/3D+Hj0bvyu+lv+Sv/bBXcD1ARmBOkF5QUQAzoGwQFEBb7+swDL/OX5APyW9371IvuC8x/5u/pu8cD/4P4t/90HwQVADVsO8AlNEHEIkQQiBG4EFv8fAjEBBQCq/Rb/yf3R+BT94vYz+iz2MPgHACT5F/WGAYUAUv8V++7/WAWK/OT/swK7BaQE2AHcBMQLpgAt/+cDywZzAcz94gckBf79nf07AqoAKf6k/E8BZf1k+6D5+Pcl+0r89/qk/TwE5P4zA/cBowEgB5cBPwYnB2ECJQhRA7b9v/6Z/kb77fho95n6H/bp87X5MPcw+5/7uwKZAlMDgAn9B/0JFQzjBzML8ws7Bi8G7AK1/5EAZP21+Cn+MPwh+vD0y/cYAUP2MfWkAI/+Sf5g94oBfwKg9xAAY/+VBg8Cx/47B2QGBAFB/yoCUAjlBKf92wU6BU7+TgN+/yoEgAAw/hwHDv+U/qf8CfuU+J/5KfnT+oL91vvZ+9gBwAAeA/0DqAMEBhMFDAfPAkkDeQAvCPUA5P4z/rL9+/uD9EL3sfXs9mz2evmD+Zv9+QN+BcYDCAsvCRoICwhVCpkISwKsCHMFSwVLAJoCRAKi+SD4DvmB/cb3mfV0/Kz/Sfzh+G0AE/0M+mb2ov7rAY797f9+AtkKY/4rAt8AoAXqBsv+uQQfBakB5wTPA6EE1gPN/y8Cmv9GAf77hACK+oD8xv3B/BH+uvsw+XT5kPkI/OD+jfxsAU8EVgmKAwYIMweyBmYB3gKx/gQBB/6B+6v/xfgU/gD27fly9S/18feL+GP7cwNNAOgDCwuID8cK7QeWDSELGwc0/gwHfwIEAov4bQGtAgT7Bfk0+s/9Fvai96b8kv10+UD8AfvZAM37qvp5/s0Fzv0dAJEE2wIIBo//twToA4UBDQJDBtICDQT9BOwDCP8HBNoBeQDl/wT+oAB6/F7///nb/nv4KPyP+Xf93P2N+UwANf/1AUYCYwcCB34HIQZ/BqkCOAH3/mb/U/6l/uj8P/zv+F745PXA72L6Hvzy+lT5GwKoDJMDkgC+C6sKTwbNBUQHUAyNBRcBBgUcBP3/Afyr/OH/3PiK89n9bf3297f4Xf3g/or74fsP+/D/Q/46/T3/UARk/0YB/QPEAJwEGgAvBvkDcADRBMkDvgG4ALcCBAV4AAgHwAL3AIf/TQD+/S751/r/9S7/RPY9/0P8Sfqu/Rj+zgCiABkFpQbuBQIGkAiLAzUItQFbAwwBNABW+9n/6vbo72H1Avr890ryTPsvAmsAp/u9BucHqwrWBEEKrQwxDCsD8whkB64BaQHK/7gBnvgd/FH3ngDf+JH4B/9p/ej5z/vp+637tPv1/PgBuv1m/yn+gAGP/vcAyQBpBaIAZgX4BYEBzQY9AYgE6wBCAfsEqAK1AZoCmP/fAzv9Wf29/Lz69fxD+4z79/pb+rf60fs//Ff9IwLpAm0Cl
wmZCOEFKQYhCE3/Y//SAQ8DFv7X+937C/7H+q3yy/aV+pP2j/EW/soFhQEKAgAJgwgpC/gFbAeNDGIGIwWnBNIHqwGV/ev97/0//mz6c/12/Qj5tPo8/A77o/iA/Db/1vfZ/rEA9/jx/LAD7P9lANgCLgX9BDr+0AOkADkE4gBTABsJ/QOVBeIETQOUA7P/mv+C//n/YAEoAej97vc9/Xz3BfgL92n4Z/0T+wsAqAIsCOQCSQblCbYECgKOBn4DBwKk/YYATwLv/Xv4Evow/CDzl/Mh9DD/tfUa/RIDGwFTBh0E2wc+CdEIjwnqBNcLKQbLAC4Fqv3jABUAqANX+/z/nPwd+Wf4cvZf/mv5evgJ/kj/IABC/pAAUv58/CcABv4oANf79AFyAxoEFQLKBScHXwR4AYQDjwSuAvACJwOp//IDSAZ7/CADvf7yAp74JPpH/Cf1YfuM9M35lwJp/7f9MQW3Bm4BKv7cA7oHPQPNAU8IVwQQBTP+JwA//yb6Zfob9aD7+/ON9/z3Cfsz/G798gWfBlcEWQkqBs4KZwesBLMIggE+BoMAlwTMAO8C+P4n/PD7Kvue++T31/qn+xQAtPx4/a8B5P2d+6H65/2f/xX5GwObAXr98gP9/7IBYQJfBUABvgI9BNkDsQTb/wwHKwPJAlABqQPZBz7+zAAr/3D6DP4p8qH3ofuj9qn3kv7OAjkA9ABCA9UJkP8wBu4DmQPuB639ZQXpA9kBi/6u+yv+H/UO9c35jPIg+Tj7gfsH/zf/pAfZAWgIkAasCsYDywnXBqYDGwl0AJwH7wBAApD6B/1N/qH5Qfe8/+b4b/yW/T7/3PwB/FT/Ifu8/jv6fQEm+7MC/f7jAfIBYgF8BF370AHNAoj+hQTHANIDlgn0A4kG7wFkB2gBaP4iBQgAcf7C/IT8lvts+2r2efso/cz5JPyO/iQGHP4YAL4E5gEcBlEDjAJmBdAFUwOsAkwAF/y2/EX4cfgX+VD2wPqc+Bf42/5n/4UDFf+GCJsESAJeDXoGQwb6BB0KXggmBdf/DAMU/b/9//pK+0z99Psy/U/5wf6q/xT/3/eO/zb5gP5g/Mv8Zf5y/vsAogFPBGn/cAMQ/McFdv6o/4kEYAXPA4IBlgWSCu8AUAKhB+L+UwS4+yoAdP1A/wX7R/tp+6/+j/Xi+wEAgPY9AJ0AOQFOAhAELABsBxMF0wq8AJQJaAQG/ocAgfhn+UP6gfqt95v8mvTg/WP3vf60/Q/7lASuBGsJewn6BhEM6QfE//gJpwNSAD0AKQIC/SsDMwH5/Xv8jvzt/aT3gvwB+U34AfyX+LD/pQNy+ysDvvuiAOf6Vf/O/nr9YAOdAOMC2waAB3AAUQYa/5AC6//gBPMAmwJVArAEBQS0A6wAlvzu/dP8cvuu9xv7hfef/Vz40v7B/BQEGgEbBVYGMwnjBOoBigOHAnQC9/l6BUL/Nf4R+9b+U/aI+Gv2Ivvc9gH9tvvj+5wHzQJ9BMAGIQqWAgsK6wTaCckC9QRh/+sAEAHZ/Vn+gQCd/Yj7MwE0+zkBBPYP+yD9Gv96+uX6NwCjAbD46/0hAtj8dwJg/Un8DAQ0BxT+GAh3ANQDMQA7Bl4Gmv4SCNoB7wImAoECigRKBwz7RQFy/av8lPbd9jH58fRi+37+7vsv/EoFU/xTBs0E7QKyBwkHMgOKBtoDeQbr/WkB0P4m92X8y/Sj9p/zffiG+Bf/mPz6BLP/KATOBRsEfgRCBW0IqAfwBlUHigvS/7kCH/9CARv8Wf3Y+jr+Zvq4/MD5/v6t/v/3lgLh/Oz/+/fg/mX5K/0J/SMDVwExAyIEsgLbA6/9jALI/B4DygDIBaMDxgU4BYwDhgSyBjoC5wW5/9H6yPvE/DP6QvRW/T/9L/nR/ukCS/lYAtr6DgHF/9kH9gMKA1YJsgR8BskEhgac+cL9SP0T+lj7yPed9kH7UvYZ++j5BQJMADr/QQPMBJAIrgdbAwAFRhBmAEADg
wWjBMn/Rv7xAQz9zvul/931IfzB/uj12gAz/Tr/d/tg/6X/uPuN+cX/cfxd/kUBOf4KA4/+1gGyB6wAFwQoBEr+nwWe/FwHg/4vBvQGegJcBuIGuAAx/8UAFvgd/9j8g/dQ9V382/gU/HT6CgOk/F8HmwOaArEDIQK2BnMChQmrAQQH6f3/A4v6JP1792X3sPqS8oj77/qS/s/8BQCa/GQE8wGfAUsEywqQBSMFegp7B78GYAJ6BGn+PAIeAJ/8WgBD+wH52/+O/DH/jfku/Wz79fy/+vP7yvyf/kECav3tBDr7QAaeAOz/KvwxASsCqP7kA4IEwP6QBV4GXAA+BcYCXgQK/VQGuP7kAsf9Zf43+aT9x/63+F34Rvw9/F/7+gIq/AADXP3MCMX/oQbYAKMGgATyA2wG+gHaAfv8sgN88Wb8q/kD+Z3ywPv/98r9CPymAGkCUQR5CLUCfAwGBXwLfQMsCbgAlARw9+cD8/+2+oj/4QJUBR/5NgEH+bL+4/iD/hb5Cf8BBPf6afntAMP3zAD4AVr87ACAA3MDqPutBiAEvAMWA5IFKfw/CHoBr/5ZAYACOgRVCFsE/QGcAir6AgP182z+E/Sv+pf8wfqK+gwATf+vAA0A1f+cBzr+iwmS/JkG5Ae1BwEFSwKe+WcEkve8+2T3lvMj/Er4cfuv9jIFS/lqAwAAQAgjAwAHW/+rBbkB2Ab/BDIF7wicAZMKhfqCBUT3X/2o+mf7mfreAvX3ZwLO/pj9pPw5+5MAlfiTA8T2EAWL+m8FJ/2bBTf//AAJAikA1/6cAa8D4f/UBzUAnAvBAJ0NFvvqAzwFsv6L/xUEN/WEAMT90fOz+4j2c/4a9ycGaf3zBCH9DAhz/ZwEN//gAeYIXQOIBFgCVwbh/QP/T/T9/4zzGQD78HP8UPvA9pQAoP7y/+kE2QZiBMUJNQL5DAABcAsLAM0D1AWaAl36CgYs92r8oABI9XwDzPc1A338eP8T+I0BMfkRBRT4BgADAO/5zgO/+1H/xwGKAGj/Cwic9mQP9PWeB1kB3fy4Cb3/TgIPABUE0wIuA/IBLgmB+CcMCfcu+aj8x/hw+O77Y/tC/j4CJftQBH76LgVoApUE7ATHAp4HpwnE/yYFdQGj/8b/k/jB9O/1VvdZ92f4J/0VAO78qAfq/QkCEQX5BSQErwchBFkKKgZwCOUAGwFCBRf8IwNR9YMBsPW8/v326/66+wH7gAEz+3H/dfwPAzr9GP17AGcGePvpBpD8bAHH/FoFk/yCAAADovzpA6MB3wMr/KoHxQJ2A0sAvguE/kgLtvltAxb90ft4/wXzuP4z+Zf83ftNAtH3dAhl9g8MKf6RAyQFdv5tAZoBgwQqB10GvvwgCLDwFwbt7qv5B/iz9WT7Uv49/YkCFgA8BH8M8PwrB08AgwkmAKsHzAKDDxv9ugjR/6f8wQHk9N0B6/ln/OX8v/1j+pAFgfPgCwH4NgDd/qP6VP4q9rP73gHSAWf79Qdc/oILAfvxBEX5swPXApf8r/4CDJ//2QFOCXIASgar/sEFM/w1Ac78+gFb9FwCWPmZ/fL+4vtN+RIAkAB9/iD68AHvCLn5fxAvAnkKNf/8BOf+G/vW+vb+EvK3AEv/9fi4AZD19wCZ8/MCVvvHAdABmAkCAQgKjgVTCSoB4ALXCrf+wwXbAdYA5vrTBZj0Ewfs9S/+kPvA+hb9yfwp+0X9Bv3V/vADePsGDMT1IwrN+1YCUf7L/pr8/ACU+mgAGQUg/dQMUvfWCjMEYgJBAJEIcAH0CQH8Kgey/H36HgF+9X8B//YW/0b6Iv569XkGQ/ZABi7+4QHoCScD1gLPB1gDrwDMAH79pwTg88AFE/WqAdr1+/0o/Wf7ofiv/msBhADxBgz9mQcAAmAEOgGNCTsC9gc1AswLOQFaAM4CpPiP/HH7GvlI/n78/fsuAJL8OQf6+CoCzv1EAsz9j/0W/HX7yP3s+iwBiP2bA24B8ASOAdUBQ
v4CCeD9qgFuA/EGsALQAh8J9AQ5BZr8IwEt/CQBDPu6/rb3EQDZ+Hj/y/kp/b75cQEM/q39EwMB/hIIiPwYC8L8gAh+AagHN/0TA+j3Jfxe89/1GfvG+TH+KvkTBaT8rQzp+owF4v0hCAEDrwWAB3wIj/8bBdr8mwNwBfP4ngYk+Y4HOPT7BEj5o/4S+vb8u/0P/6f/dPmL/+77HgDy9y4C9v0gAlb4GQ7T9WkIEAbcA0IApPwYBaD5BAHu++kELQLQDYH6txIS/bwEsAES/d35Iv5o+Ab7qP5f958AOfaCCzP4IQph/PADtQCzBGT8tQcKA9UA/go4/vMEzPkrAary/vzu9R3/yPTNAov0l/5bA4H9dgSu/AgOyP+kB3UB/wTCAR4JmvptC4kBiAsf/zj6yAFu8/L/6fKV+oD87QIl+3gEMPrQB2z1SATz+Y78/AV2+VMBC//3/hoCAgULAdUIBv0HCPTwzQd7+F0Ba/9CBLcGTgNmCngBVAajACwBRfXkCAr9L//F+mABg/l6Arv6QvvK/M7/L/tT+0EEevwVDCQErgjtALUIIwCd/y/4jQH59wz56/629y3//fxV/Xz83/o8/R8GZ/rZCMf8lgn5CNkGtgjXArIGzgW//aYBsAHk+dwE6PmC+x4BzPyT/08DzvEQB5n2bgFp/nXzaAKD/PgC+v9Q/XQCrwMb/8sEiPf3BGv8gfx5/R//yPpfDH0GJv6GCL3+hgrt+NEAQgGL/GEI6f9V+L0L3vvN/zEBPvt4Afv1qP2N95H7Nv+SBSECugtSAnEFQAei/OUBgPq2AEIDvPxS++z7X/pw/hP08P9Y+o37kQBD+zgCAP/R/zMNSgX5BUIMugL4BVj+IAFI/40Ep/90/EsAhP/p/Uj9+//m+08BqP8o/wj96Pz3+6f+1vzD+9/9XAO+ABH5SQQ+/g8GIgLq/iUDWv/CAKX+NQA//ToBkgULBAsDBwgEBkD+JwJS/Xb/vgCN+Tj8k//8/tYBy/ej/aADJvW2BWz9MwSaA+sDu/60AvwCOv2dBaX+uwSl+0j/s/vi+R/++vhi+Av+SPiwBbH3wQBDBMj76QjX/QsHwASXBLAIGQLn/ZYGWv76ArYCxwLLAmT+pwOhBPn6B/wGAHbtX/7/92787v7gAMsEdv2RApAGtP7c/moD7/iEB6L5/v7I/VH6hQPYAKQBqAU9A/n/5v24AN0Azv2HApQI6QcBBjcHcvoKAPD7A/sm+d38FPt0APsBBP45ABwGV/rf/ogIGfwkDvv8Uf+sAZX/cwRg/ET5NgEW+VsARPzN9YX/IfiX/iT4tQYz+RgFp/mFB3QCYwiDCMoDvBGZ+1ULiP5M/2L6RwSr9QwIlfZpBXf6XvsDAIr4Q/6SAMwA0vqACwLw4gUY+m0FI/cqBW8Efv5vADYGoPcGBu380/4+BH32GQ4B+RUB+f5TBIf4dxCB+s0KSAJtAIkEhAID/4QDewA4/qIBt+35CUv1ugIR9lgHDvzyBEz/eQAWA1ACCQTp/i4KOPtJAsv9Bv/k8YAAz/TC/zzyrgCh+g4AcgUw/rP93wnCAfv+fwnV924NcfyYDsr+TQ/MA1UADAAgAHX1oQJj+HX7wP8F9dgL9PdMA436ZgBm+aMFl++/CDr2UgGCBTT+dAViAqoEKfzEAiTzswd49QcGSPczCAEFkQKH/x8EigDABMgE5P6/AnP/jAc88bsIg/cKA038lQI4/PIDlvnPAib//flABtr+GAsq/BAFNgIDAU769QcN7hQJlPqn/kL/k/cy+BX8Pv7U+Wb/Cv+bC6z0ngwN/EwGkAkmBgT8tQ/P+gwJBv0M/z8E5veNEODzDAYl++oCHPt8AkT0v/8O+TADpPht/WUBSPorA178Nv7J/egHg/JJD/70DwiOAZ4F2gAZ+s8C2gFQ/QUBrP6DAB8L7fm/CVH52wuW+fEJKfsBAkr5UQRJ/Br80QVn96AOPvO2AZr79gIJ/O8DKfmNDP//Z
wh3AYz7AQJk++wBp/Sa//zxbwXd7wsBrPhMBdr/f/73A5z77Ait/v0EV/8VCuwAvxNB/uQItwOGAdj4gQB0+T72vgm09VQJAvKrCNj8Of3B+fEEBPk1AyT+EwCy/fD2pAd5+nME5PoTCFf71gpS7pcIk/QZB/T6oPtqDCr5kgv+Ah8HhvyYCJz6OQr38BoHTf9qARwGav13/kcGXfvu/XH52PhGBZjyQwyH+RoFUAQWBhH+IQmw9TQJbfuQ+NQCqvPWA+zyLAAo+hn9SwCTBrnvqAlK/h0B7wBY/y8CWQpyCmAFdwXW/7AHtfYAChX2OgAtBbIALfr5BtL9hf8/900F6PvB/B8Fuvg3AIX54wHV+IsDNPw1A938LQV+ACEBqPmDCKj0rgI2BI/6eQRSAV8ItPWYBWj+MwaA++YF4QN4COQAJgP5+uIAO/1A+7f/FPm8A4/9bQru9ykJA/jpBrr7j/+YBAMBmQRR9ggIMfgo/ssAQ/WI+3ABUfXsCp3sOQh4/Kn4Lgaj/1AEfv6YCpMCkAQQ/XcU/PoNCk37cgRvBrP+ZvlV/FQEGvTyAVn8iwGc9xcC6/56Arf7egWr/ef+Bf2r++QASP5wA6j/ZARLA/r84PjdCIPzxAKQ/T79gwDpAdwGH/iMA7IFzPzXBYEIT/9QCHr7wAS+/K/+T//B/PgCX/18AtgHX/kT/F4FV/nIBsQAKPdOCA//2vhJBs3+lQNC+WcFiwH08mH/7fcg8az3zP3e/PcFdAUCCkEFVAOzAAX9XQeuAsf/YApyBy8Dngl198MDe/cQ+Z8A9Piy/Q8AIQWa+UUB7v/fAcf5xghO+OX7HAB8A+j3ff7ZA3wDBwWDAZcHCvFaCWTyVPpCAekDQ/rWBhgAbv/OAV3/owhS+SoHMwMHA6IAaQCC+3gGjPz9Ba0A4/fCBan8uPYfAzv8xQsQAZn8GQpW99UOjvlD+RsDIft3+kv8Q/q4//b2sf6VADj83gVg/VQCMv1mA8n8uwQ3/f0OMQE0AvEKS/1aBDsAUgGFAB4FLf2tAEv1ywUU/Sj7of1XAzT9Zf5L/WP3kvz3/7sCt/hwCov/gP1VA4j5eQDoAMT/fAYC99gL5/sd/hD8TfzUABH/PgcSABkILwCNAbv7ZAdW/nwBbAEaBdgAdQPw+FX/yf1Q+agG7/sKCvAAvQIg/BH/4QG7/rn6BAnY+7kBLAUr9Gf62fmR+nj6FQIi/+ECnfXIB4z20PwxB2L+KAW4BMQEKQY1CMEABAYK+u4LL/+f/9kBpvaQBRn98PYXBBH54wRK/qP1GQd3+ur96AB7AEX3LgcY/a39SAOa//4Aiv1cACH/O/tZA0oCfvokBiP4/Ahw/WT/rgGA/nAF+gYQ/wMAPQPQ/0YFMPdZB6b8U/8K/JP6x/7YBhr57gaL/6EDdwRf+z8GEvncCP358AT1+0kBk/wa/n/5pfqF/HD25wPH/db5xwYJ9+3/HwTW/HII5v2pDuz+bQnzBQYC6ASLA7f76gSGA4799v22+ygCv/ex/378nABy/RQDbvo3A2f6dvxNAbv7NwPy+xgEQ/+i/h4AqgCT/rQBHPKsAbD+MPyBBBsClwGaAQUBpwFQBcH5CQZs+o4JwwO4AK4KZ/ujAEz8C/fiAgn9WgGxAZEDpv1cBF4BqP6wAmP9Mgyq+MsHSvjQ/nT6q/rZ+RD8vfzg/Yr2rPqyA071FwfQ/oIGFwMLBf8AcglY+GsMS/+S/wQF+/6Y/YQDOQEpAYYA3/0G/xj4Jwgk7U0OtfpgCyX+gAAjBYwA5/d+/hcAG/eKC6f44wHw/iAGp/MtACoCkAIW+p8BH/X9/ez82QAvAEP9dAsXBIcIKwWDAzr7igg5+bUPTPVCBDoASvpEA3D+xPvvA7AFtfscCan3wg8Q6ygKt/g3/D4A1/cM9tYATP0I+5sGUOzlEOvzsgQj/5D8xvYPDXz7cwYo/gsKqgqL9H8MXfibByH9WwZK9xYNq//nAgX46
AXD/Oz2HQYc/Mr+BwVL/yEFAAk59IUO5feeCZ/3CAMGALX6EvpR/xv5l/nTAIj0VQTq+k8AMf69AYL0LArD80kLk/5mAS8JIPzSDmL8awffANIDDwA7BeD6LAjd/04EfgD5/t8C4f4Q/M0CyP1f/UQMXfRAA9X1PwVF9D3+1/2x+7v9Afyo/pn4ZAT2+P8EeP5gArn+qQjP/H3+zAAhD8r13Qo3Al369AiM9wwA+f23BbT2Qwnf+pQJtPe9B6L9FP7g/1AFv/4OBkwBRfnOCHP4Kgsx8NwGVvWPBPT1RwCT/zsFxvk9/VH+2vWI/8b1IAAM+asGOAKbDSz/PA4w92YTP/m0/1YBVgBSCMP4NgViBCYB1wMnAyT4hBHZ718I9/hXAkX9/AGt+aIGWvzk/TD9NPJxBKTu8Pvm+JMBJ/EfDqT0ZxBZ+lQGwAf+ALEGJ/49AiADigRD+bAM1PSREXfu6wtC81L8wvxqAD38Xv88BIwDow9W+wER7PPgBvr8TQA0918HYvakBJX0BwZ+/mf85AMk9FAAp/3//1Hx0AXf8NUI5PQUDxb+FASwCVYDKv1XDLUAdvzkDN/2agwG+SAIJf31Aa39ZwbK9YIPcvNVBvkB9fziCbv8/Qiq+FEDafMKBvnpcQNh9Hz8Yvfw/QwEDP5/+/QE0P4xAGoGsfcYDvn09RCM+SoIJwXKBaj+pAJU/Jn5bP4N+VMB8ffYBqX/OQSuBM0JyPMTEE33mgE+Avf6rwtS9jYG6/5R/J4JxfkD+4IB5/Sq+0r3g/5t9aQDhPTWBvf4af2BBTH85gTq/G4JkgXbA8gJYAOA/P4NKvvzCEf2cA7EAXX+GwUB+4ECf/7V/0j9Zghk8z4FnPYZAMz9rPQ7B8v7PvxrAaH01Qg591/4+QK59wkEhwD6AKMCRwph/ZEFUP2UBZoCgPwJB3j+yP4EBRb9zgAm9b0BygIY++sKu/sXBkMC5wPv/64D4wPKAjf+d/uFAQ/69QB4/wP3Awgf9gIHjQC88PsMK/JGARD3//h6AkH0EwKm/04CtATLAPQERP88BHoE/AIJAz8B+QjCAKAJjABF/yoEa/78Aer9bAWv+XADUgPW+g4HdfxD/5j7f/dv/jf2Kf8x9MD87PqX+X8DdfoqBnX7LglBAcQBLggFAy8A0gT+AIEFzgD3AhABwfitBJ700QA9/HAB5fnbCeAA9QHQBiAAcAaH+6UFIfo+Bsz8KARr/+UCDQSh+Zj+4/gZ/CH9+/hm+YQA8/sd/nT5NgRz9ogDov0v/6wJu/nX/T8FSQaR/sAFbgZhBQb7uwrH+B3+mwsd+eIBEBLd+toGUgJb/yME/PVlDa/w0gFwBgD0BABAAEnxR/vX/Sr8mfomAI/+DgGf+9j+WQAF/ssGg/ogCZr+TwVaAl8DSwS2Cdr9iAVS+7z8Rfuh9Uf+pvqPCBIEngS7/94K7PWmCND5IgLeAO/7agcB/1ACiAUU/LsEw/xY+qEDLvJTA9ztsQQD9iD+2/uzBMMAB/6f+cn8GAZX+lsDiv6rD/wBRBAoBVEH+gMr/9L6gQdQ/ScCT/n0/88GOfvvCGT8lAei+w8EmP8n+1X2z/iM9Uz7KPcHAYP7Rv/8AhP44wQc+mAFsPpOAgMD0QQXBKUKbgGQAxYHFv6O/cL6LQAs9OoDUwLyAd79IwfN+18Gsv/EAIcD5QKuBPD6uQJx/+8AQvckCQr7HgWo+0L/E/t+/Qz6rQFA9aj/lAHo/R8GBu/+CDvz9gLG/eIA0/ujDX/7sgTN/1UE/wPs+O4Nkf2BBf8EoAeu+oMNff0JDi/6jQ2v/Ez7UgPk9SAAffiWAc34UwAv+OMAh/EFA5X2FgGe8LoIZ/jPAZgBEfkwD+7yoxEf+ggGXwPIBzP2+wqw+gkHTfrFBOoCTfSCC2H8S/1a/KIJEftJC6jzWBIa8WcMMgA3+IYJHv1bBJj4dgHO/YX76P8CBvTukAggAE8A//NbBCzvd/03/nf03
gPg+9MLo/ylAW4MSwL//9gDvv/YBc37AgaoAGcBAgdJB9wAUgj6/KgA5QHa/VwK+fRnA4ECuvj8Adb3Y/gx+ZX2dgMx9EL/aAKf9pQCJP/P+5gI3f8uBHoGwvz3CiL+8wETBUH4fwSzBmH3yASv+mz+dwLl+F4AwALqAkT+Uwfg+6gEFgFV/+YGIvyUDXj4mQB5ByjxBglW9RP9yv1J9qAHUPqp/moB2fY4AGX7T/k8AfL7aABH/ZsG5AXa/0L/yQFMBNkALQIjBWMDEQIyBPUJYwPq+0UEVgJQ//4MB/s1BHAD4vjHBHDwaQu/82b5yQMy8FIGUPdL+K/4fv9hBFX6KfmBAmX6K/+uBEX9JwUaBoUErwYuBVH+nwnD+goAkPl4AR7+1/+U/zf1jg79/5cLY/j5BSoFrfuNBIT8gwobAWwAoQTr+LgFs/1Q7/sCcPj0AJ72QQC9/Qr4NgOE+NYFt/hRABEDm/xzAH/9DQLyAyT/PwQVAiP/wwUdAtb/DAFnATn/owdP/1UJe/pSDFEGUPt/Ezz4GAVj/BX7nP+P9W0CB/ti8ogAp/OF/yr3e/hJ/5z4egiV/uQEswHyAaEEmPpMCMEDwQKBAlL/Ygc78fwCQwVN9sr+KAXbAMgEyPwOAuQB5fnZDOv3mQrbCSP7yAlg94MHpvVc+LIIr/izAcb+pgKMAY75vfz3/Lf30foJ/bj5k/vBBcvytARRBOcE4AMVAFoJzfnMAVYADwC7ABsDtgRCCB4CgwtoAmn/cAX+/0AAMgWiAdP9JQIF+oEDHPYZAaD6x/VCAWvwUQG88hcAIgFp+8oC5v63BKD9pgKI/a8ErvxHBx4DgwGnBF4EN/w3BQH8ff86ASj8qQdr9JgLz/mwCI74EgOvAtn+cwaw/o4GK/1eAZEJ8/diAAYDjfX4CInqdBIE87YBjwFu9A0H2/Y1/3b3M/nF+SgAH/nQCt787Qa3/68E7wQG/sQA8gSv/dYBsgsQ+ogLtABkBxwDSAcLBXADVP4LBDcA6vPZBQr1NwLV+zn8IwKt9Kz88PzO7QsHa/wz/r0HqPi/Cn35yASb/7MBuv0cDPsCYQMiAD75GwVk830GfflZ/3MI3wFH+MkH0/xpBT37ZQadBgv8DAlO+7gDCPyTBrr3awvc+AMDDP+n+gcF0/fj/Mn7cwFM//787fTeA0/z3wLn9HX/uQSb/dwDcf1QAMsEDAKL/oAJO/vBB9cFuf5D/1EDZAEBBs7+qQof/hgNAwO4/dcDm/zUBw/4Gv+m9nX9wvbl9RT22//D/HwCPfnF/7/7oQJXA6D5ywdRAUIHMgA+Ayf9FwQBBi39M/6YAxX97ACJ/Zb73QAsAaMF2v/8AnADgwMpAj//SvyNB2UBl/tMBGT8ggVD+4MHQPzC/2gDCv1p+ov9Zv9x85cF/PJt+p4BCP1n/eb8x/ypCiXzgAqT/xX7jAhq+tYFN/tACMAA3QL8BDAK+P6LBuIE6ATBBL8DegTMBOT6WQbx/ED1UQS07z3/cvdE/Ib76fppAfj4jfdMSVNUYgAAAElORk9JTkFNEAAAAEltcGFjdCBNb2RlcmF0bwBJUFJEFgAAAFlvdVR1YmUgQXVkaW8gTGlicmFyeQBJQVJUDgAAAEtldmluIE1hY0xlb2QASUdOUgoAAABDaW5lbWF0aWMAaWQzIHAAAABJRDMDAAAAAABmVElUMgAAABAAAABJbXBhY3QgTW9kZXJhdG9UQUxCAAAAFgAAAFlvdVR1YmUgQXVkaW8gTGlicmFyeVRQRTEAAAAOAAAAS2V2aW4gTWFjTGVvZFRDT04AAAAKAAAAQ2luZW1hdGlj", +} +BASE64_VIDEO = { + "is_file": True, + "name": "test/test_files/video_sample.mp4", + "data": 
"data:video/mp4;base64,AAAAHGZ0eXBtcDQyAAAAAWlzb21tcDQxbXA0MgAAAAFtZGF0AAAAAAAD8BohEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
l3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0t
LS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0t
LS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0t
LwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUU
AFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8AAAC4gYF///e3EXpvebZSLeWLNgg2SPu73gyNjQgLSBjb3JlIDE0NiByMTFNIDEyMTM5NmMgLSBILjI2NC9NUEVHLTQgQVZDIGNvZGVjIC0gQ29weWxlZnQgMjAwMy0yMDE1IC0gaHR0cDovL3d3dy52aWRlb2xhbi5vcmcveDI2NC5odG1sIC0gb3B0aW9uczogY2FiYWM9MCByZWY9MyBkZWJsb2NrPTE6MDowIGFuYWx5c2U9MHgxOjB4MTExIG1lPWhleCBzdWJtZT03IHBzeT0xIHBzeV9yZD0xLjAwOjAuMDAgbWl4ZWRfcmVmPTEgbWVfcmFuZ2U9MTYgY2hyb21hX21lPTEgdHJlbGxpcz0xIDh4OGRjdD0wIGNxbT0wIGRlYWR6b25lPTIxLDExIGZhc3RfcHNraXA9MSBjaHJvbWFfcXBfb2Zmc2V0PS0yIHRocmVhZHM9NDggbG9va2FoZWFkX3Roc
mVhZHM9MiBzbGljZWRfdGhyZWFkcz0wIG5yPTAgZGVjaW1hdGU9MSBpbnRlcmxhY2VkPTAgYmx1cmF5X2NvbXBhdD0wIHN0aXRjaGFibGU9MSBjb25zdHJhaW5lZF9pbnRyYT0wIGJmcmFtZXM9MCB3ZWlnaHRwPTAga2V5aW50PWluZmluaXRlIGtleWludF9taW49MzAgc2NlbmVjdXQ9NDAgaW50cmFfcmVmcmVzaD0wIHJjX2xvb2thaGVhZD00MCByYz0ycGFzcyBtYnRyZWU9MSBiaXRyYXRlPTMwMCByYXRldG9sPTEuMCBxY29tcD0wLjYwIHFwbWluPTUgcXBtYXg9NjkgcXBzdGVwPTQgY3BseGJsdXI9MjAuMCBxYmx1cj0wLjUgdmJ2X21heHJhdGU9MzMwIHZidl9idWZzaXplPTM2MCBuYWxfaHJkPW5vbmUgZmlsbGVyPTAgaXBfcmF0aW89MS40MCBhcT0xOjEuMDAAgAAAMsJliIQFfJigADijJycnJycnJycnJycnJycnJycnJycnJycnJycnJydddddddddddf//8FxOAAmKZxB5GdbBJ0I/qo/+Ee5/93d4oOmgATyCOPs0YQeSU9gHogQgiKkeTMGgzhtmA3WzCcX9v9GB1FRV6izBeETEN8RUn4Je+68aKjADOf3ubYk08AHEtZSwC2H7GiIqbM8cRd43GpcARMxEOpH4KRIvGRP52KgM7jxi/EBunL+Pb8Ix+/7jerkCz/QtRtUideSfnaYLRJSz3lB1RvwBgazm58BcNnMliUz/zW1WZSYFyQG41SL6ow45c4iU6r7FJFPdK8xe6yyxBmrVixHdQkyeS9T4AwgVDLo7LoTzET0SdQjjirUv+BAXdSd8IboCpR3Im+IIKrnmRguh/9L8WA1irxxWN0JvUNIu8nNqd/b9ddBcVcsuC9IeBMTymfewA8LtG7q2wAa+IwbQA9k65iZLgPob2eFnnDBcagqMpt2I7/1VZ1Vh27BryvRZp0fhRWMBxiA3eVGMJY8H/No5i//gMZ5poHv9ddddddddddddddddddddf/+Tk8IDuABDKTM9BI7pwAHwESgL/56gBTQGTkZfwAHUghT26wGNHy5ieDNIBFU+qSAeyFMKNEmAb0DvqGnHGb+jFMYIAT3YDOggSMfG+GPCScBAvSHHWgsNL8ndz3dnFPgAfIEOeu0Apw+TLDwj2nBaAYQiqTyG5xRyeZgaBXx/gKKC//4BWA8QTisiw11pZXteZnofZgQQR/qMOwbgv7hvNiUQESQhGALf/myLwej3JG1GwIEkX+/CmyBBflXC9Sl6cdQpi59oqlWHzUueWwQe5ggEWJAkH4aw2KPjGk7t67AIQeUIrvoDzCv+899b8QJ4uz7k79djgbBzQnVsOrUuJAayty00xMJlSDV0VtZvIqqnvBs/7ji7WDR39wNZom+DQ3v5PxD64pyT4PuPL/1l0/j8acTZmZp7gQdDHCen6PymgTN1zjuEf0VeQ1JXF2cjJqY8imaqG+4t3t8UdVEOPXNODVzgfbk4h5dvLnvPP20Uv9S+7xQKtxZRuBeKZFzqqMDGhMjcftOTeAdlwGOH+T8AdBG1C5w0i/v7BvCEdnYm4KFog2nYrtyV0EXdxvdebsMw2vne/FK1TK/2JTQHexJdEg9FKaxQt2mB88PJ0av7/AOeAm71/uRNi7ZU3a8a5yI11EktxpGhGl0uLWmGxtN8Bu+rJmjMMXTlGLqvue1sF4nRav3bdVQrv1QxGs0dEPWCMvup9s2pXg+N6cLxIGBZz5Wpmfpt0mgQylEeOVFPzReR9TMt9IYMQSVZaxzw/9TTQyaHfdUFVGovPWcCwM6871GyOSxd/XLt6ziDrViqIqgY6b4GnD7lxqTcST5l6CiB7UGoHAzkoXlcpqNx5mtvb6qhHU8UeKE0OsVm80Zzx+lrNJmPE3I56lGLLSKPzBk50VHw+AmyNP99BHL2X
j7I6wHIcBRBquSR4DLEZGqM8r6v/mdc7Bb1umLIBjfOeglpBU3w6a74MsxqLrrrrrrrrrrrrrrrrrr//yImhAIcACxOAfUhhTMjEAPjEyTgAOwhpL21pHBa4xPz74ADiCcFmJrhUNq/7tNtj+cuoAQGC//nGxva5+690BkbtgEMDwPgiMpggBGINge3wExmw0cfg0CEHIgwAmzPSx/FBaU3yImsz9GFg4ADqmAMsBCoXZqRH/2mNedevwxSI/7aZnj9mNmYT+nh4EgAXist+hzc/NGYb2TeZ0Z7i6aG68KkfCVfskOLagYheehm9P7Pd7skEOz9+74o5EqlVs/oTKb8EGnYIAELrE53D79YkdflH8hbvq4bs/j4wyAwuhGYVtXq7YmUaik8yVHntqbJg/Xn7UaHOID7AKbZHHaNod+ZytfRyQcpik5q731gF67NGY37A1SIdPgu6iT3G7fHi6xEKB8/dFgNXEfqGOmMbuJTMV8t2ZGskPyMfhfrav+3lL8+GcHvXwzokaeCcZRDjbBQI8o463E0CkplW7++fde5Wjhv24r/TED9W1AYiQiMmIn9cfLYTb62/fM1uLwAXS9dq3hunpx7JmC98FD5D89/Yh8mRmAJhuhg1cDMVeGrc+xYMQv2JWgiZ6/7ks/zf9nhMnf0ctryrGXodUbuDtoFAUu9tPf6dZDszkjO6BLjnb2JpF7vjm1Chv3i/7/MxZMFJ80CN5PFcununmH9W7sHXJ8exHXU+OJrLru+QOfrYjkWu24T2DO8SSuApgRG0fEd+hKEkoTvy4MLvdqxqpMBDGNBdzPv/sf9lDfjYXYzX1jfoewVr+UZGTfMqmhQD0/QY+HZ1P2X2mdQE75GBXXHHIGEYCgKJDhFqme6sSEQdUAVEnI/d5r5W6f6Nv2Yz/NBD1tvOEladUlUtBf+HKo26DFSmJ76rxu9UqGo9l10/byG85jdRNDWlBWWAAdQm9/g29t2NnNUGpwELvnVspmMYt7548FfGs2E1eY5lcd7GGGgLQ1n+ulqgwBIysonwZHmw8dIBL9Pa7fndLPH7KuO05gKZZT1vzI0M1Uj0Sq15ntTDQLWAVHCU1ypQ37EcLnbXfcqulbCXD7ZBEbHF5IOl7cg39+f0ME0seX227NqSQ4vapL2GaCtlzgx3Wu5973sITIgqbwSI0+vh4UWomuuuuuuuuuuuuuuuv//s2HB3ABE/8r4gOAgcJllJjJYaMwxK3/4AEuRGO5t6/7/4JCHb1QOSG1sORf8EF3YIBIQvAJjWwP24AUtzcIIZYmsDMdgCXIAB0k3OP7BWF10jBIE0PQp8FtY/Hg7xiqnus8Hz2oWj3wQj4r5sqwDeyyVhuy3U2tLgn9EUewCATFvJ36lAqDuQVrzveA/re/6oIH2/JHp9C2yb0b1pGSQNe6vBGAUBBrCAQcJtAEzNtsGgkFyH5rw65kFGJ7FY8IIPkXt3WUENwFDMier2666nTIF5K4uc/NhdpP6RgyGhlsqdiGUbwXYe3rzw78yb2Uf+TqrQ+Hd0w5uptDCt7/3XcpHGgAHfh11xAtRfx+nfdIKtYfZq/f3AsMQnfFy0JG07qvzNIv2KjfHH3Arbier36aKYAJfocSzuMAy1rcYvVOKmbPudrvCH5qhl2wnMtj5/dYexDpqkGrPBB/oEcXu/gFo2mD2pGpWSl0DZoF45czID8c4IiawhTAy7pQhPyV2VSrlyQb9s8ogwzgCnkQEB7vaRQu8vp3Ba2e/kj3YhrLud+6kaC6/BXvWQSrevBpJCRX38RPqF9CwlAT1gBNI40Y6J+hoYDo/R3kc1iV7clpjivESd0EziRAJN5NCOeW5ADPdWTMj/wAbVV42vSm7B4ZP5eJ69wBZRtw3WYbq852n1L4m3lwvoAk/luOr+fZJ5vHDw5/UKN6sW1NGPsgvEsVWvRWrHixH31CfVbkhj5IL7TFpZxjaq/Pp3FGJ5kWOW7b0/cbkLhCZBWFe0xFa3
1I6v7Vz1HuO6fJtQpz7BEMI2UAGrlMhxd7ZnR4MZ2g8Q+PZ2kH0wbGg7ke7UZhuDUrhbl0GOuxsbOhOzKDsSQBz+lsUL1uovzWFPyBhKkX4AJWpGRiPeihqpCf88MjnUS3GkVo32pvrW/WK3clmOe7ZmPVN09//3u2G8RC5iL3qGQJUo/hqKc7KNC2sc6gUWBIxYjiSbmVqwtzrxeNoDnRGvq9ckRyk8QAAPKYuQdadKxPIk69XfKR1K//p+/VktAQ91nn7vCKdNH5f2i3LVP4XA2ya24NNT5meN6XJxilH7POb8YxQs7kLtdOhG689vjSugJ9ks4FzmH5eNvLcmyhmL/INtO+FT4Fu8wdoRlGHcmuKFowbfsGXc5W4D7vjLSmvVTtesW6kFmgVeHRST+9CEfyd3RWqxvcnARmDUwIDJsfcI3Wx8Ku4AYRXkhoxmxmB8ikV1QlvxGleNcBdRGErhoNn3ysGkgGdj6vq7SmkHF6wd/ACZEI2M9fqiy4aURePJrTfLlmlfq2gh/rNM5IDl4Sa75QJ/cquJXDff/0p9gtEhVXU77Xru96lrrrrrrrrrrrrrrr/a21vJCXAAVwk3KFWQIsmykBaZ3S4GyLNV/6jCJlFdH34AGf0f9+dQqM2Nhm9dygDK1bAjMPb98AGEeU3GcSIRPUigHbSBf/+fG5R5WnAJ9pOy8N9ZcuAcdhlBJa6jYJFtwfhZ45Sj9hG6LPPixVmBmrYJsA8Bbh+z0S39d/t/+JEVfv5PiH8eX5jZ696xZPn5yXb5eHlGJ9rjTDUpgRDW87FHUGxSwG9gYF6jL+3P5Nyo58irDt7XmmoGoSTu994AWqeEACm5Fh3EJ2vyimqrZOUI+MRQd7hh/7bL7EKdWVHv4ISgDCIdGk32oZrhfOa2zkkayFH6wmvsHNyc9zkakIpqjjIIOJImguJJfJISdC+KLQ6MHrLYAN022D6h8cpjcQ//FmV+nWk89B3e29RHwffx+mmkU2V7/BS1TT1cGu1mRsdKAd92OuvRvaEOXoPJp6ZearPjgWvg4UgwneLmzvoslIGDLMnaWAef73UTYhUmRkvzIq3uEzhqfgCH6p2d3/lt1fhXW9CZbwIuN8/DfjbC53srRhBdQTCtVr3HuO53C4G/tvT+Rjwhn/12h5kahwKM/1ng6KVd5ojR1+CAQYgkIIVbt9N/8As/KQY3BXmrn/GlDI+QBkdP6bXJQQYXGpPesvmiL7t843O+3sebkM7Vox4bmub+nwk2GIEgBQwBmz6/PnM2uydR7EWFep1gMogY4q9MvfUvU/TbzhjmRmXxulD0Q51MUtlZA+YB+oc4e3FTqxxfWJ8SWn82ZzazWt8MQpcNOp5SCFuWdAPtc8DZfF+n6SE6OI39TsuPHP83lrlv5UKqCiKvt7wYHdlfAHgwLmEaglstB0j2o4hif95nE2J1FqOSQA9Zcx+FtBou4X13oUxMgKsxkYJM6v/6YyJ745iXvbfpJFjYWP3eTWHLkKNUSLxp+C2/6lVG/73Xpygx6VRn/YqmP0yU637BzYVfA6mnNlE0OW/wo/7MSFYS9p9a8/UlOk/UekYwf04ztrMd00Xiy92jARVEa++YY4HGAFCc+o+tu3DYqTc/J9HMLShWjInpOWrgiBqzLJqHMP4x5PUoEmLfg5a2P+8bLIDPrdcDVjN0ygB/R3GzQsqYNjWG76yYkHucSuCb/p1SiY3q2xUYZ5zA5lOvy9LTfmxDj244S/n++3YsA5DCUXot9q7Cr5dWd9uJODe5cYDBb/Pk3sVs9pNB9yJgpDWQ/yc3eGgAPwyBaGTOH84/jHn9X6Ue5V1cG8mjASmaqxYT1/UIbQasFViFDo5Nfy02NE60IJlyXRMm3clmF0vAcGfQiBb7STBH0DC063kQv51a+FPubwmWQUdS4EOdGCmDv/eEcQaxw+wGbP/eR2ikA+B0+5YRzohlZgXWco3v/2S0toh0VPf732vAS3A9l1O7Fg0r
AXwFTrqCqwD0UNdpsp6KYME4cDIIYAKzy+QAip/oLyBm7xblv8mg+QaN1CX4Hn1rKNaeKR7smmCOos4u7OYF4EzfxBR7XTf/a+N9AFriI/W08GE12omN7/jqSAdU1AUKCEHJ/u8JLrYn7x1gH13pGzTGruJvWv3t374m/DEPIDbhiJPuzascX9zwsuan0Dc5uV+XwfKgFMFOX6c3nj2e//LncJNmC9nnu2zhnEcAv4QLubFZojbl6vLwBmJrYzPAD/A+8qr6elPgwJOx85+1bGMrnL/icYSYIwMI5/1VJPTLqJrrrrrrrrrrrrrr//+EF2CR7tAB54AWTqAcLGF0icpdAHAR4EZTee4A7TNFnrW9WsAdwkAg8JlUkb9SkOLG++76VmEDdzRgGclYMOeh4TU/kSMajwQETCSpDJ3qiETUGUKYPy3/NpKASdgjQ/lh/f7+SwYal/hm5hi3DfKNEwfJ+GcnCf/+OQVJ2k0bAlhMBANX30dmXSsdhTCMkG1myGjAZ48sXQAB3s2pMfbL4eMoU1Hhe/Vtpe0HqR9UEgV/A4oGLszpyOfBYZ9R4vYmCevV9/dyHYJpN74UbDfhNwL/shdxuLlQay4Kloks0ryqPxOpvczhH2/ESu4OAMiA5tqVRj/jeD3wGPa1hhC+nmih1yP19IKLKyMzOog+GjeUwaA+bstms6ISokBZyMJVDFyKUPm2EQcwtKLjdZUSI4AhlYP+XTbvqXbA+lzcPe1y7aPcIic2rXo4AtYVR2A8jAzgs+RnSZe+3aKlnz+y1a3c2YGJnEHdR4SuFLRYUr6pfY6xrGvUxk5x0m870Cz0zWEyvd/YrDWuJHOfusqyC+OcCVbz08gRVcJT/Uy8v7hvZPVXW8G4RGo6O4khC8ONSk0bho9MWMK2cgKMHGBwEHnNzt1iR8W4hm6Vk75ewbNZoufDxsdnugIAzqjuCmvu/ExRs7+Oeqgf5r9FXQn4X/8qkV/JZg87mH8Nh+xq14nyq2DFVcvRaDVyHv90TDWLMF/95Gz/OCSsVyLLKUJufo2JLzDW/uPIrwBw+/lDtXGOkXe4KBM4HBXHYV8QlYSsPSlHdroAqWzcVPyTa/BlMVowug5RWQfqHQl3K645i1662VEm+YVeNWPfgzIiAZkPT4opmbFe5AosXU6yPomNYvOsE88X4uxNHbEoJohk+qYV+juC9jpBjr1yGRbO8qCcdcumZjHtYIZEKSYhBAd75EZmtd/VoACaRGcEVxEesszyJhlxw64Z8DilVzUjrcqbYGvw4W6ASdPty77y3flfebXV696rjfBqRvurSCq5zqfAJ6/KvzN1gYjBBLgDPRU6uVdSzrAhHv0l8MP4RA9TqHsoK7CyaOd67bGTx998iqO0AJgIINQ3Gc9V0uJwdK+/bRB1ScTseR4/7+l0w+Scf93Pmhrhcc1yzJFCvt7hC+b7il2bOscIwDJaaCvWv8yN7bgimvBRQUMYl+VNb3yklBL5uQwypYOSX9P/Jy2d853MQxVOlRARGcHZzZSRxFbFQ0nu/jEzeGWNBu/qvUgfWo/vZTutj1afc6CM6D01dDOIc/+ODTmuW4LXfgN6+fK/b6l5nFVeOftX95Gtbh89m8JEISFcHdcWZ9gHslMm75o7GWHEQGUi4GUpkJExubyx9SZAbyp//4EgEgOsbp2dpph1/lOU+6GBmZaa4RpajZL8otpE3Vnc5mwkqti5r9Y+b/nGcAgl/UuvGe1MQ4sXrjUKhl8thl234XrbGcs7RjvSt/e+k/UfPIlB8TDT4a3a1PJRF3QScShrBCCzSXUJvIu4qrnD6GQ3zKz8Mo+/+7lzrv6hCKUCeWMoT+ATMdbeGUdXHOvDAQIHkzyfBJ/PgGMB8Ts0v4q9o3H12Wg7iVu6JrdHNQIDPnnpiBT2QNPg1Uz4WLX9WMY7UDZ0BCCt28XqfPyeWzceg2qqXLVvcbY2sPNkJGavtJnKVyrBT10/rn3phWXmbINMS1pD
/LAN8Xocbt2ZB/+3DJIG0N7UIKrfb+7y/vMsbfEuPlzyiiywlEZEQCTzH57k2HH/dyZEa2k4ur6Y2uuuuuuuuuuuuuv//9hwd1004McEPOh0kf/QLgDs8BA4nhUmLb/oQ+AIMBc+wJf56EgqYKNvvxB8vBWGQISAQ9HY00HNgP05NoEHojhwG1ztt9rUciqIkBaxWw1EsFBDTwOLlpBhZM2Cvo4aLgsDDl4NTC2gd7lHnqgBPRUG9M/w1AgLAxXfhjNcPDD93J9f1sxVRptmPcsmD/9wJBbBbq2bhkCBa9AAVWTofmSl0GPSTHgX6QBXLfKaQXiPuufNkCSxoBS3tNf8b5f+/LoDMoGYFzlri2J6v3VdcZxHhp0BXvXIh89FtsSJ9Cz9Pp0G0v29s+Mz/QvwDkPR2fL1nZqwGMwQptdeCTPWQTeCKTN7rmyKRKpFMekr/csHaE5l5f1QW1Bjo1k12M2Ux268724p/enxYfMxGm1TtitiY+v/SWKdHoqqeuje4bpmkOP/NA0HSZFY5/zYCqAMBXPVMEjqkiiqPUJcxcpsoCStAiuYmvLuVxjcIKyk0B75qzJanTeXbN5426v99n9sWU5OD+fetJRYMdB0MP/TR0Q10eTXG9LtT83K4+jICv700ZsXR1Y3iwfBRiuSb1X7/CTXiejkTo9Bx5A5keLATqqNj4YH6L3qBj/bmTuc93E2RAC6leHfAMaAAEAHKYpJDbX/NFmithh/Xqejl75dSLR069imrJrlsi8c3Dr9mg9KD46a6Tm7vaLZH/KAope5QVj4uwD/z4ruILaB+Jx/z1rxPY3WpvzsCmD3ClBQyfVTb0U1UYZjlo+CkfrPVtj8CbRgF6w8jWl/grlvPS5yy1tI8q6YD+xUpp658YdruJkm1MIMCb7iV0CHQ0FaGnJqmlrHUZJp47CYMMWFRBxLECs/avtzWMShzmmwBrJ3RKVlCnqLEQhkDCzWo2BN97TwishFAdAPrwBA33Y3MAhF1P7ZUiPw1UgAAgB7rx0O/5dv7HRvYqQjd/+v4d24ytV2OnrHn/T//eRNCjsYActl9/Ai88MLhUE0KqGNBNr2QJyD/nCQ8/HrCHmn26PmTkyMMmncnJ0SwORjjLRP6M02GAdNPNz9vgaZ1DxIw4gzDfOQvIGLNIHqfe/TvGrBSBQHspCLPvJnjPCiXhLJF/xh087PxT9vmlm1G5d2ngZOEldhREjiYF3P1G2k8gMZvQ77ZP1FqSi/mnds6zdp91cG6bXgsMcteksosLU7tSdhYnXMGIuCt6KUB8Lt1n3j/DK3AlcmgBSHckjtms4XgURooYvTWvnHedJ61SBuq6QMXm0D/GXF3BJ5AbHXWe/1HL3miZ8dncvbsxqB5p4HtgLKratT/jlfdUVeDVC6UW3S4NCIwEV41Zj4IvhTN0kKGgKKCK0jdh465s9MFQI9mITOt9Gm7F7akt6tV0O29VZMqn396xv6ujVDvv6NrfWS5foAkahN63eX1LvxBIBFol+SCj6aG2vV2WNaaIo9basYus1U52UDORLrpDdSh1aqI8r7WNmnD/go9/ZM5cveED0DsjQJ+C8/AsN+n2/sNi8JZYIi3V8NxI6P3bfyQ54VPXXXXXXXXXXXXX//+w4FIfgb/maPRa9COTQj/KAsWAmTCpjy/7MV//7XJaKi06x3y33o61lFuKzn1/kyv9Fs1jCQcCzHfiLg8jft5jJ/l/Q7uUJzWce5VW0zji/rF05QP42SRajyRBhYshRfxqIis1G5gomc2b3pmbjj1+g9iEtsTnak7aoRwQpXCR0EPIzCm3ko0nNha28V/UBuDsRKN2QkkU1W//nm3ZXdPyLymCjWxzo6+P9otN5hkzdZl1zKWsm9MPS4L2F7MH4wtob7RGnvjCfHZhfX1Lx68Y15n9IItq4NhhwbSD7V5tl8hmPoxCBObuguwer8DXPO+XEBAnkJ+HeyakoDLECxV+2KipMb9E5rGjJV8vGU
2FJMMaOVHxObn0p4DEmOEiT1qAAnL7Ndwhi++INuJyagmrILIQA6pFsSYJgi+oUUdcWE/Fso3XRH64tHspycp2s1usuTYlWtvqxo4bmtBygInReM1WavwQAQNYYexvrCT+Co5J8CRUUwMC3wWdzCnoXQeSWMZ2eZkQNEOY/sSCmSKUZTmb/JpB3cVzbONzf1BT3MLKeA4vSPTu+uZOCE8MPQAU/8oUQobr5uBi2aXHvQ4FK92YQ99KzLeVnW0vOeWvfG0U3a3gqJFQRRgT0RaSSHeQZlwrYQ3/Qo+2wakhvzYUb0YG4G2ytwimmw6giWwlqR0ORA9z31L2wv027y/ADuYqYluPU3K+1C3JPiKJ0Oezc2K1G7Ask4oU7N5nE6JadNIcr9bYapaxPWZDH+BmmDuN6nnLnQ5rEd4g5UZGbc2v3qHQI+wRHk9gdNJMd1JkfxkvGST/Ba3HRtNcqhNKELLXkG2N6e+ML5usgY4w2X28kKVZoVuSmYjOJyfxpFhQapPV3eC9QJ6weRuHhJ+hOdG483aATTkq8NywbDkj0Ytxnaru1f3fURDJ7M3JhwU1NFe7EPEQe8iJ6fIOGjJF5LxNTSIZUjHjSeiyg6bw2h4En6gwwUI7Idf1mAMmVFBbjyRqHnrr+lb/OcFhB0lCRGISV8U2VrjxhT0QiUwr7DV2q39MsvWNBdpAr9CylD+V4FzrZto1eLuqG0dNo1ksIxzuW6KhWdXVXJmN412zncFPZ5U90evsN7lQGzbCZwW6DmnB8wIFCDfxpvT1n7a3r6quG/Mnb2stTVJRaFaLc901W3hpg1lsg6CsIwXsae+dvh/YbEkw2CoXslL/SUzJYO1+/U9ddddddddddddf//0YcHQbnIOUEW0i//noHeBSA7wMihUq/v0CwF/oUAcMVaIDd/6DkfCBQyp8t7+oJQnCJgYkPFVN4JNArbTRFTAPPf//cQqs4ScOaSkqEa41dHsyT1ajjQQPI+vFu8mduAnDSwtI/jsPOE1REJA00yavMJJ+lujg9hp1N0VEIJkldlyBBPqAHz7OQ1sXcm6nj0alU+Ao6h93B09ecul4sE/9g7wyrfqpI6A5I4gjMrzynHi+6HKDOE2N2nGUXPk2kHU0rlcNyDthgc/j0FqJOLJoNQeZlDzIm9ZhfQ0lQaJBYPGmmb03YYH2DOw7yHOk3ihSl4tkrxV5LRBTRXcDFXLKSKaDoecfPc8+j/d2Rt1dfznSxJRf17kwr7bfV4YRUHUxNOxzR+JP4m79FiqHlBGEXVaWDFDiWz9Af85SLjL+AEFzS7m7mOQ7FqfrgEn0/IQT/dwwJ2paV3ehvq7oWR4UbkE9TQ1mXok2W0+Dv58Tfeu7JZNK/beEAHIHDFuOL6/+QfdR+SxMTd1V0UyJK0FEcFTLOyb/nLy4d9USkkjD7xMKIcm+nHCUu5BY5KbPzw/fsTDzHtjxW8qHyMGuluA32PAyqnqo28TczdzIhFAVlcOY1UbTTtxVuS27XBlS0HGuvoExvtyxfyrrSfQvY384y28j6iZHU1TVtK+hD4qzfZhQf/lznEowiDzJ0Uxpc8s2C+lcL4JEp4Cuawu0Rsw8jU6Rk4qwXsbv7WfWM2iamGP7btAQDqwdGTYWopuBQv/qnS/gm3oSeWhr8+lbaYYabTHYHPTkxuZTNj+JX18VrP377o/PUCOotT3Gggorhic7xHr7H5OjJHzuvzSTPu6CciJZEHtBv3jfvQVt0iaYOY1+8cHW2hbcMYicPGOr0VIjrow9wOwe36LdR0+0psa7Xr1YkXY/NEar5w2daskcFwOJ4EuSDybizZ+jJkhcXXcjIUo8wcrH260g9O6d9MdtN82Wbo38JOznx5Acr1v09rdg91cZ2wfXPO8xpNF33FFSyW+aeg3HnuLvQbMt+ZxkEWw5JoO1VYrrrdMkZiQk4uTmDl/xAf7ux3QURR1xomd0qVOUSHiFvaTcH9DpLllHOzS31kAe0+B5+W
ymw6JYBg9b5eb9dezm4iLkE3BEQ/IwzhpTSRAW6ePAOCck7fRuAV0YQ1dWrOWug1lCYOdWpTuAxW1mJwvUtHETJxMJCfSv9unADPoDWgLWMcv5P8z8STT4cXIWcgEWEnAxu6b4kRr0UgHKNxEj/D4I2sJFWrP1E8raOgL1GkqOkdlY1Pmav1bc0msOagUbT3LhSs/BFET7e4zPFLV3Mgf6ueWVNrUBzsGZseuV1oOTHDNHXm+wsstoEyaKY6NIYLjIM1DZKM29kodIikhcHHuRHvdlT7q+v98xWK4wAgGoxb39slOrviPtD/+w2Ll9CvfbpmVE6f9x+54xc11111111111111/0/9Asxl9fDvd+gEzXpvm9B70RKJpT7hDgTBkxrQxVH8HwSIj+eIAOP2GAAxEatSACWH7hZf834uCnqHhw4WExwPw8OATHgFYwKYR7QFin7X0micWADHA5tJlztXsSCeqJeNWBDT1+WTkBp1HttooPdHjCA4LIQCoaDUHRQ9mwCJyAM2VfWDqQVn2OSmBXhM8440BmNQQKMNHt0OKxVVqso5LYSkkQAfSDWIMjHIwm1qi23oWNFkouCePwlAQ4MADiMnPPjOwO8CZoO8LvMTckKLmDqU0qqcOMoY5U9lFPLSjBO8hqZ42SnwKVeZUfkR2hkeM/Awb7Zwgk/UcCKfuMLc2aX65Fe3gjPo2BkRGb8ZkrwOimaEdi8bI/qSxcgW+/IbkQEjmbo3OnOsNNXcMa/wTWY7j3mmk0wrwrbbKWC0xNaXl+Gj7k7Dxe+LIWJrg3hkBHtImGvYfjPv9wDk0QVyITvRui3jHeYABsTGYRdDW+0kSN9ddp1pkZsbUN8Q2Fa1PVGfuCMFkJq8Pz6voetMZyj8l3aMl5oMem6kTuoTUjMt9CqbI4WTxNxDs8R4YxTtapNMMtYhO6sxqiqodRL11tTo6ET+Q91dtynfU3lVVPHMJZNXNuff5Bms3DZsEIL8Y6t448WkkpvXRkEaqddNuPvjpZXRv4eFzXigzamNhMVbq8Mx5B1IfeHrqnavdtv1/0bCiEfiquqgLdeC5B4QuNDIgmcCRbdAMX76u9r/VfRuYoYkCq7ipJsrkuDcZamjAr826VPLc7Cw3QCxDPpooGRDBbu3QmB+0gOOwIe22VRP0z4vsbsvUbwZOrDX0BDzIFnTHO5hPgzgcy5xCy4nPYZXpiueYjXwL3TxZrPcXQwq2fwmgqtHSqX3W4BCmOc1t5dgmhCq++9aRbw7M4ntwW16WLtL4dbtLveB35ZSJsG///8tmQig89hC7ClZ2jX3ynFhcyohkfHrXy3LGCZsvw1qOJ/WOSodpKfkrqpw9iHLMRuAzVc9lP1G0+W0tVTNNb+fsGQFObd6VeGXA0WzRnZFPgAvkdJBeqRZrnARopbLiGb/iU76iL4v5+0YlLLzCc8Dd3kkT/Wp4/C96kIsocYr584QBUNxqQY9U6a/zd+1LZj7nvhJ/XITBJC2TOnLvEo11QDDnlvBQ4B1dUdu2rk/BiAyAQBycFcmjXuERJGG0TZgUgb5Flvq4wM8YHNsoLpWN+xbfNS9Pqqw6XYLpwMVnS4t03b4O6mLBq9jJ4GVsyn6KYr7wAYsUGKyma00JNJy/NCuuxKqHvwSDrC+K8/4HqAMMXq6nJMtkjfXXXXXXXXC2AiVtIOr+GR98scd5L+EeNpsxtjqdM8CJArgVcWCj340EAYEsljPAOyx40Ay4YWszGZfoT+Lrrrr6UpSelMcEvAzgDM3VWAchqx1/6AsyxzM4ybxdfNZxQH00m3i/3CeVlWG+BhBBVZM7D1ar/8GYALGFXkwlqLRTf/zjNbmvsETBiBAoMSG2g0xsK17YaJEAZtEBRgvONTx+3hHEVQn01P/+5lpslkziy99MXpR/zVP+sDqIQjXaoVdL+FdqnKnGdAL8AS4k9nAPFd8L3C/h1Ur1/TijBk70rL8EWiSUzUjPn2/4qZk2ARj407Wj2F
DTqTcl0wuK3FkSlLefW+BpSKIQ1LJ2Y7rjdJbkZI9W1r7qmHTpkN+AKWXISAkustM/avToIrJui5sX+9arrSIkO+5pzc7H/D37qcLMbKN/L4ZUAUmthom1u4FWcgtNKUUxfUFW5P/drtrD0/HEOXJ20cT++nbuJj2xiripCp33+TXDxvpzU53qSsSKzdq7IiMU0Y75r/wavedhyiPyN7uhho5Nu+Cr+BuCrbjIWcvj2bx6op4/c/iQukFqU7TTqir3VGLD/4fxDOJcWbPguDiGERlnz3rnlkClaLuPZdRnpWjkQ0J+TqxE5gkq/vZf7npgh3QoI2MH/bb4dkYPdpNQPk0AAxP6U3CLK4PGX5UFeYGEb601/7gbX5O20CPy/W24BKhY1ht1bvJwzgbeDFipa6BS9rg+L3iDGbUoI4gXxkZS9MfBypzJlGal9YEvyLE7oF2ozNqCzkRJDVda+ffTMTwJKYGsrAbMBmuOJBa02s22ZrFJlCGHQ6jPwN4pDi1xjfkfu1f5p7/DWZcxulJ6gfL2NCIOqKUkjvn3L75YPMFLeq79YzMfM4x2wCnMxUf0i9TZrGAu1eyelbOpUvJtPBGVQH2IymzpdRoErAZeQN4vQXtSDumagf1n+CL11enRF6qf4iOyT3acPNAKJssnLad1UfXXXXXXXXXHqGMtEd//11111/mk/9AsGA4CkxQ5GQoEylZB1X/9kBoyzDZIGraCBZ2CAlIWEoEsIFZ+MooMSWlcv9Pmip7kbs0mg/RqYbKDNSgT+Jh+BEsd5/BPgS0AQsmDYUsjATcbqWRss4MN/fVhcugh/FKpiEufL53k7AqecAbNo5HFAf5c4+wQABTBzAgACtYDimxpb8LbiqP+iDxfixLmYGJpc/v4ikQskHHR9Li+cGXAAmcYXwnJQYOAI4nJ5Z0wcvs5+sy/8zKqEOFQojlUvHRfED5c+X5+c5m6b+rhXlu1bRc8rzewYXu/XPDDREm1ytufzzVFdg1gGsZgVNq8FUsolOhWdRqO7jrxpBHp+GUEUr3wgWlmLJM7Hop8TAycDNXjHoeIpElvu9ePBwc61pKkuxuF7c8Z/CcAq37Z6InW/Tav7+p4V2e5up+NLwCQSwY4htMa9CIeYH2gfOTWyUeiOg/0rsvyu4mYKI4jADV0LHKbBvmJS/ECBNeuwk/0oNC/2XaE9TC2hr+T+gzxsfsUsrE8ahnomCZUq6hUGUsPCqkEL4sl8oDeD38FngXL8OOP3ezJGiIJmKE75fPhke0YVojNztJuRClL85mwq5R812/KDc9IUqNgIUMpW3UO22I+88bCc201BREyye0xa32WttdBmDxn/xiTApFzNFnyN6wu+++XogH5pPfAEKwm6+41f4B1ps9tidKJx0Hxgbn5XZ6mZ3sGVPqfHN2JWPkUIzl3KNr3+Slv828pH8PYA6Qevxvze0+fxWS4zFLPe2ohIQcMYCv5PT+oNRBBYOlxck75meQfu2vmP9TFDJMFyeb3si7Ug8GEjeLF33+tumMQ3CR6neJmIm8vQ7z+JxEJF2ljjqTF6iXk4ExNkt1YAkf+2TlZvNc7CEgQ4nCEZJANrllefn9v//BXUbEh/BxHEof8f76i6666666666666666+fn+ehwQzHut/f9/Quzg0/0NCNxJffjiDaQEZsXit08v//3Bk5rSmjg0zDgfubF47XXfPDCAAJhjTAgACgOcJ8ANJ+KiPAxGsNdogdeykDgrpbar5tM8z8/vOcsUON7l7ovhgaUTbiCv7e+4MDKNhLHTQpBf+AsgiXNQrxF8tWfmkZrnEvAQAVUDBACRABSxuF/BpflgQcAsRdg/D3wd81QeHIkWFf91p8puR8HuM51gP2Bfa7LlVIf9oSaW/g3wIkY9wv629R59lb+SGiVBD4lrdZB9UULHPYdoq9exIxnp7faG6imlHtqib2xPdyyOKrB7Bv48AEABAUBf3cc/O0jPAsw96Yk7PGixP6rCTS
URtSasLq8fexPMtO/e8arDCnwdy4VerGKIP9+MAWojE0lyrYlhwXyvkzpK3zstfj+JlLH+Y7txRmwJqF7qmkY1C6zQ6oIgFuON5vyXqOOAeMJTOx0p7yCEzV6YfPxqggQbvHLRiPQvE7h/lMYdf0Y6wxgN8JrewXiRBJLH0FJHMW6BSFtogpum8npM4hX614JWgSKjTCc5szum2faAoO1XFlE2Uk2LFBEdCW49W2dWkkaIRxfvrsKDojOtFP++PAt4qudsu7TgIitwPnn//I+TV4vwNRMn83d6ySqTaR+998556Jm8gFpLTxxaf7HaH/CWa8fEUVlaX4g4FDIpBZiIAMGuLqbOaqg0n0fXybO/jlJ62A1acvfQB3IxSV8+75saYQL5h9qDVVQL0PpNuZ4wmmBpDUb8Y7JxzuBFjl/ho8yXLfCEzMQkTQlLf8/9RddddddddddddddddddL19V/0OHeAVFNGWQaM6Q+YAvBYDtxw/A9YR+fTg6GN5AwJwYFOaEfnHPXnxDPeEAAjPcEAAQDkF+AUCtBGi/z+LgD3BpJQdxP3/VdVXHX1XJQolloatRIC/Z5gzZDvn1yQA/OFmqTDZ0oO4eAfnZlkDQhWGEALsKIvFhWl3Ht3/6TIctb8Vj1SnIOItDqyKVEWHv84gk6AWWbCC3TX/BABFsUEAIzg8cbJjQLLgUGMzwaCJlhifCcawQp84JRtRAL4DDsE46pa519AzD4v2iqqp9V8mXTMzOHljecf0XLbzhY/VprZexC1gZMDJZbhkgHDHBbdVkUYSx2kbuvEJb+GA/orVbuvAAvx5dLHVXd8MmGlmsOx4XDFVuAb6n7dlcRqUA6g61Euwxkt+BK2WwkvLcRHuGYw0CjRKpvImn/Bf3NymVc1lQJSZk+b/TRb08FvlsMB7rSalj9xJhGrr7yoRlpk2Q03XXUyrUlVlf0qEScP2fbDVv5MN59Sj/TO7xi1H1yqiKBZRFlqISJ+ePMhkOVdkK0LWMVVqGcd59J4g5hEAaxEF7aN9jqSQbHHbXcLUN9sYlxBeeWDRZRr4EAcKFAgUojwtU3CkZcfwIyPd3cpMmSf0xw5vg3wi76Ii+bqMSPk11gCu3064goubUj3Uvh7LcQzKa1nM9e3mTl//BWU0WM1rCipYwJCQ8rwhMzCpINxKT+VfvqeuuuuuuuuuuuuuuuuuuuuPX/gz/+uPX/hodpvjIHpUFwt/wGAJldByIegM0v/n/rZCYIvbpB/J3/4pbEPJFegGZND1xWI7MBurgg/EMlE1BfvgwAYOCAARgAb8yBKjYT0YCs8/wGjBOPyNgZ6HFYHpXz/+qx/46mWLEkdLqCZSmAK6AlAeuU/556QgpxSb7wEjEd8wF2FHQ+2FIEv4j9JFf+dgzmwYN5ZTCLLDiEJcMvzAI9BQgQxTf1/geE4UHhUFH7Ot/kMl7BKQza7CAgX2Yis6eVqIGH/W/5ZQhyIAygaEoIIBwiZZohEB6o44SHo7aEL0zP6I2Q0QAQOyAmvg4+YP1IbIBVz1jSQMWvnn4bOBwDcTn4olT9QsLpw8/isHLfdMnwi+6/+uRDJFFHU46/DNOj6SAxRIJBqKmglQcdfh4JngJuEvwPBBPoz+Jfn+IWgSSNn22b4JZSpcxxFXvv1KA/X5z7qshSYfD+hej8DVp3zXEwXS0RRZVdgASSnTz6gkLSU6ztWvtFAVA6hO9GSIpb/s94GZjdnF6ufjsus/Usu77v/6DVMzqIPG/M325+W+VzwJb58DAxwOGstBgy0HvXyj5nki/XXXXXXXXXXXXXXXXXXXXXXXXXXS111111111111114AAABEEGaOAr4EjiwlgIWl/YYFzo2lePuE59t7yhXcX2lFiIg96v3ov+/CxffexP/JXwCPp6e/7jih+Q69WO/DGHGQXbtXH/BGaHy86Xdl+HUY/vvy0pBXubyvr1TfCW99pZf/WFC/+2LHcTEXezPt0xN8y+QbOFhD+FW2
4y9u92MEfalw7vBKqS2bjcpbY1aOT6X9/EFCY3y904sbx4iGu7zGjAcr0pHuOK3lXMEm3Bdq5gyQFZztvD9eOhM5PzOx6iEq/w+iZHl/3wV8Ewnp8C4YLrauFdhSU0n7Akdf6hV+KEO7u7vvxF7wDP2Jj09mBKL998TL1pQlurV3hrxFqFX3TbeHHpwQeHvEyYW8EFZJsCRAAAA10GaVAK+CvzDsttLd82e8NF/3bFkAg+P2u667hKlKvB5f3vH30+HoNn3DaU5nFzJl6ERcNdwDDc2v++fvl3l7lm+Fl7ZRgo3Fb17U9wr5jPhlKE0Nu4rfUd3CcmVa/hN14q3bw4k+4LRveXcf3ywoX/33F4Sgl9++7u7755UbkIfk/uor9KE3fV6H4iOuJhPWKn6oR8TrWtb5cUd73fUXWXPPzwj5q136QT3u932Ur39qJnhLyEV5f296RGrRVwtPLH8l37SO0Ku0X05rhqep+S9P+Hvh74uAAABekGab0pAK+CzxYrF5i9Ge/lIp3evffrDPiyT6ATepuPAT1aZrf/z+/gP7jITus1O7uHodNcOJJPxlBJ+lbn4Jpi2LkqjQ3EFTovrBEVz7mbVb5Pr6a9fhO6PNry8LF/vLIEHf7b996t+/J6bl/9PeWQ9794TfthEUKN3fgI9bS2+yqdHYmJB+78eZJVi+T3S7fqi9NL98K+bEKCF6ZunxhH7ef2+3sqByqWPapttJ8ntNfv09V9P0/br3tCTzKUll0me8uES/l7gnijsS+3G0Dkqg7aW4wgr3co45a6dvbvfJ7p+79fT1RfXk9JL36S8t3Rj3uEX85DX/pXJEbd3e+k+Tp+uqfr6+v2ruoRplZHv1E9svXNVdcK+UTP77Xruhuqvq5H1wj4jGqfk9UmW8Vvpu7OunLxrT0/XXJTSdKEl4kJ1TyUvTyqqkhR181DYShnB88v76hUv6+Yl305ZZROK/Juuye20/1C5fJ/rIuva71dQ4X8lVh/42AAAAbpBmo9KQCvgr8o7h5fIn5YvjTwusyFLXL22gx5iYKP/Mv/uXgj27ASm9NPd+4Qh9H3d9737hG76RcG3VhGeHW/y8n6XieimbssJ0r+eHcVd3u47pry8IHlYVX2LCDu4KO7D/+4ks8vvBH4dsejvXdZP6TVxeqErrXLol33S1ynFfCZf72wiOe/ke+yqZfp9+mzefO9UJ1vC3m1nQcsZCNMlLstuWTBqu3ew0kkQEBDdrzD4bsa3hiU/1WVdC3035fQ3qv2kLVgp3lWyfxwSu/eLF0kNIsJ+CfyPixwfs1pcsdFatCLZbcFUaCz3bveT0t/+nJrDckWnNIelWlp7sThLy5//GXd3qld977q66uXyYGtkh3Q2YTe9NZDbev0oRX7FXu6Hl6auh+Sr2mpcIl+L/CUn9a3xOCcr3e2/uz6ruvrqha6L64S8EkzE39v3H3e7tvd3f0Ur3qz4hpP/cI+KIZivk/2Zu3v0kd/SnoWunhPyHm/9kLzMboXkob6rrMe99EVeyivhQv5fkHbv1YskfVC919iHpcRhhV8l/UkNZpDiOL1viOXMu9YjiIV5vhDgQNuMreZ9JwVwAAAB8kGaoBXwV+Ydgj3LY2/iy7RkBqVO/5SJTi6ov+/DPhEmMxe4CRcsngjl5Jrj/fzPL9xoO3Qe4+G14v0OlGYCcixrpZe2qtsFctMgaKr+Iysg+A+7XgRLo+cm6/zJS89lpHJejxFJ383k/Xf8vv1lu+Fy+920CUZvHRO9LGD629rp/Xs/Sa/Za5Qt5hA3h+4TWXW8sTd33s5iu6e1fd9E6S2iiT/1dF9iUK7u7+yOEi//Yo16J2++8IkP8okG1oEA3dE2hkMOpNx73fZ1qZEX9PmQC9h/vVUfaV/SW4TJnX7Zsk9psvfeXHj0n5MJv3BZFbvrV3d+9oFN0Tl1E6YlveksgaP9e9u7Oit0yHcdFrmzk+kq3fJ6S/ku9Hk+m8saWkVzosmG/e2ht
+vv6ve1JS+oR8hI2k7vvBNvXmhFdP1XX19ZMRj6S6b8irUEJbvlCL9BEoh7/PVaPXmVWJfdLXf0/Xa/Ju7hH2QdpwZvf0CQrn97VX2Pff0IWnydtf1XZ1dlSL0I+C2RmT+b239GI6TvrEluK4o4o+q7T7eqL6+vvtr5JfP4R8ERFTW3iQj0MSv2vf39fk1bffVlWT1/9nd8K532lZP0tX+iSmRhibO6owl3wq/lyfaftKW8uJblontP3/TX3Wn2sMWJ9Edl/9YEiAAAAlpBmsAV8FhfyfFis+hN13Ehlr8WTfrmp3rJcpOW/lLcscMeYmiD+T4INupihWmX6BveltN/vue5siDj3WapUH9+2MuiwktmvPPDzlNEi9+4elNdxnt9W+9nCQfbB6SP2fVie6z68pzykNQqX+3KwiFHsMpBUz47lhb/uY+HWydK2/d9fXqi/sTXKnl5O7y/7oSU77hMv/lYKBmTgFXr3+ne8t4t2X1vy28otL/uV003KunrVf+qPl/RfkNZuEvMS24bp5L95U40g0d03a3vsyB93t4CeVV63rXHytZv+i2ecqMu/Xvdiem1wXdok8Y3tpvwmTOvGcebPj+ZMEOafPDH8Jzim3ot9MC7RKqXvCT9IVP29xPvvZ9l3Pr3VuM2NlftLJ+RZ6YsWmdmGH+1ev3ffVJ70qKGu9iyhHe+5fk/bThPf0/uR3Tfovd5vDLO6bctwEcqzlr/03rk9q/Ffq6yLf8Il/vscQ/Q0Jt7Kxdy+7pcd3OMvu3095BN77ft+iLrNlyZ1p7iDbsjJ8j7bVZLFohX3010ZR/kEU5f7HY7173nZnf7OE8jL737+36+va5utcEZ4b779fXZZN779uqKbFbwkT1XKaVMJS9xDS5dn9tukuWT2k19+r7Llo76S2nvvfWoR8E8nlQbqn+sv29rmV+LJw+j8/RP1fL/J7O0eKskmxnxfQI+Cc12OpPX/4IyPL/U18g19y1/mJzqKSE1L3cK1vErOn43I28/r90TtJUtZJBPFYW1ExyT5/es3xPsTqva/C+SQRENOsX/Wq+y+qhzxB55bnlgsgAAAoBBmu9KQCvgr8w7cJHnZLJcXRk5AV8zfi+a0Fl8NL3BSThTOawq0+8gQXgzuzf015k/Y3pIq9x88TZk3pw0ko0pX02W4u9fLwm+nN19ffZeX8vylt3vJayenRaTdZP68ssIU3PVlY9j2WXeIEcKeYdxOFfYkkN6O728ntp24l4Lj/L3s6dOhOvqi4Vf4LCD846unHfSDtar9g1R+26aUIbdyLn+vpkHpi4bk/vIsxXjI6PoSUvBQ+pyWeCTPGA38HuXO1eq95dDmH6vSBDy/qE/FEpytlU17YUIG3S7Lbu6r9u9wCd6+tDtLBEc620WVApO0aBG8dSygz98/uwny2luPjP/p2bPw9Fy3KrV8tCg4jRd/8v29KL0CeVcqmMCRYnLrsaf9pibj7ZXwJLRLPzzOl5zpL6mjSV2XanRGCnMajP2VkoS89/+t9ZT5PCPihVjvCS1nPl/e3BTcMpHXvfQ5oT7SSdb3y7cM2Au7ajPf/b+rocObVCO071Wte6LW+lcCH2er9/+KEO/w1JeljL9ie3kcEgm933t4JOX5Qi/yGe31tEghu76uz+/uzCeGEkqs9iir/QmIEk/d/X5KvSesxL3CXkvf6rT3tLRPu3/7+6tpC0vpQi/cOzP+TyMhvoTXGF97rfPf4Ii8uRj2+hr92Wk9XfQmbe+qLd9U17wl4JDG/Ta4QwpcVu7ob8/uHjpc4tdnZd3fZ6LXdX5TcesXWnWXvrhPwvUm/M1v1mnL7oS6iSc/vvehxkjt3c9CeT0kqVUqf4ku73dwjWTDcmOShL+JuhPabmkNP/UlE/X+byYUdcfk9KvCC+6vSppWRIveRm3f6hpd1pW+Hq3VLBXAAAB/kGbABXwV+LHccj9Uy14vuzzzMfXl2Zcwx5iTphN5q5jKX/7BR4wAxC06P/04
5bqgp1L+9uO8tsvGxd5fpMs8XwDvWKfZ+H3KXtK3CfdXu924npcnfl15T4QO6MKv3CQUu3a5Y35Uzv5JaqisnjTvoq22VPdCdZZN2nCfm8NJMS/+WjPl++2tqcvLYuCTwwkj7WVX2dFLKVjNV1pLqFH20CgQ9zpZlnLixH4JjwRaJeUj2jUW5PF9BOUqd/KENyKsTBFy2YPPtMlxRHj59ta92ZvCj9xUVu93d+0Mjq9y1If13gYk/ZcHp7ve/RWGeAVLB9n1Ge/8n0lnv7TsbBLSrz04ZPd/ssEZ51zOatF1ZZc31uBH7Vcf9OKESb6mCUnrX7iSk9C4BI7uqve8tFaEvBUTbbhN287tbGvsEfh+tfkt8ntOuWKXX1kJHaemhu2edqZf3u9+iyle+1ka9oQR93d3CPKLEN2/N+7LZ2/5Raxresurohsyz717yV7v6L68vv37ptkhJfKCq7xDjvbtE12Rg9tXtVZWUr777svrtpF8I+ySZ35IJ/M+P47ClLa6fqu7onrlar9/XdWbe9tfCPojry6J7f3/ckm16e2t0foVfn1yVRdWL6pM3TRWRfq8LeCO9315dV2t1WtLkwzp3m9ZClW3DWaQkma8l3wWQAAAolBmy9KQCvgsflmGWCOXaRr1NQMbffr3/F5LDq4h1cpcg38vOjaDBf/cEBsdGXMdcwoAc7Ilnj4Q++9Lfwn++Jv3GlMFM8zXwQ/OWZCw4FUv2ZstqkrMtPl9N7oFZtxZu3IqRxFmAwI9LZy5L5U1r1QnpovBIXisjZPd7umoWXlgmCEwse2aJH24fXxNrdMUeCZ8/5FwSw0iN3399b3/LWU7cW4T8KDOHHtfU5jwmtbtCuMM+6ibeXiYrf7s/8aCCdlr2vevBJ4bi8tUvoxbvCl4rO+PnCYslINyaumzlGGDahsEn3H8t9+77KyK3sSr7cFJawwlphsnt+y72KqgSXFrat1Iv71gqhfqgSUpKX2vgqkTBPUpR5awGmtspyhoM4juzwVYcu/ScrrUNCOR6bwggq4mH1k93X8JSjkZl5kL4UL/eWK3tdvFdu/x5r94Bjex9H65l+T203ztwQlwZsHzHQD2Ohcf4c6ZoFr331guq6mmj3w+lyp1+0nUhJK3N3RIn390hPbe/2gQ731CPhIkkeVjvvBFe48XNtpd9fZPf3SPFvusv/7EGXfu8qZQxJ/9ZN6UIeCYzGZ7H5/vWJ1RfT1Z0hMu+1vSv2WCLKx9tISiQT3d+79CPYgRP9a1baPl19P2bV/f3+R1T+2vQj4qT1t1GKX5YTy/e79st79aLbv+qoT7/aVu2+i1b3Eb3vfqTe4R8E5qis32yest4kIm6zOlfJ98l/e/f39DV19m/o9b6y5e7+FFn8jr+3y+M6pgnNDsy5yhh92F+yftq2WvkmLe8n9X4RDBC8ZXkwp5hXN/aPLsrqhPquvaf1RMLZkQl3+/Ee/sR79dV1sWf8N+yB4zZV734K4AAACwkGbT0pAK+CvzDsljMWX9dy+hGiHC/75SBChwhepY3Ee4UKdr3t/mQMKEfpn8o7eP2Bfe4s3Z4BxyNTavfLCZ+TUQ5L/LUUp0JjqSd73kS1+Czx3TEOW81XO2/oZev73feOTNO4V8w7hvhL+nsoJCH9/8v274o5eRSH2F/Hf/vf2Ku5i2++jyX3a/Nme/XJwq/bCJhI+3fZDE6/fuOiu/d3IdIv2ke5uGkX39fRchPey0vKUMoN1z2evbSGkNU6bX3d8Jvehhnfzn7C6vZZbu6R3bniGvQJsah+oEx9hd6socL/sF9OsHVVQ2Ce2NC7Lfkpcyfr+4KuUPp6MwRDMpCE2vvDo/QoNKL4U/LmOJtw772YKOBI//BqfcW0qsEuU3BJ9ahCV7F9dkwmX/3BP4erb7vWt8OkPQ1+23XaSXgOGimB6d03gxzf/7povZw9a9raCXvk/tZXEylui3cgnt8Ve9VTbIMPH/FdpCZO6aEkR26JCe7G97hHsc
R7eO0973vYLqfdbsEL7VCvvdCy6rc2795T7vvCZOMtzdKrPcBxOiXf9r3Z9L01f0mNedeP8wQm/8EO52Xs9reEQQ3vd2JLSWi+i+7IXhBw8XTSngoK+46dfI3bghvvWT7cyr6dlTW3Usvd+0FLvdN93e79Qi/kBWItjNNvN5uT2e4u2yRRXiu9+2lb09t7YTKHZP+737x2773af3XTXa1l6axPe7ZPL6dslcoS0iGe5vd99CYIT3u3Tl3vuu780Ehs93VQ9VUhBL31aiP5LCcqLCPkEKq15NaXLYtZcdUXRP6fcslyTtKiNGKm0q+k+5YT8RHFNb9ahfZ+6+vrsn0qtLiyXnLv2roU2xxwgeZL7vnNu4KXugnd+X3C6TsWy9N9at19dVZCD7v2+9ehlL31fXTT3MW9w35I90+T1zMbGRXwhJLjWCyAAAAJaQZtvSkAr4LPFjM6o8jXfxmL8XSrJF06tuHgvLeiNny+WYX8WbmjPg9MtHfhHknIFnwAk9oqb/q9Du0s3uCkoC628IcUuiXT9uu0fFM7ZC7PCBrCHhdNpwi4HXuVmMhL4fHd74w9913v/3klu8/oTNq5kVeXvfE3e+WHuW93CvgpCVEq85lrJEfnLbcrcIncKkfx+Zv3nm8gqNPn0Ur+QuMNfvre9ViWW766/ZXd96EYqlDXWvx/8KP3FCnH0J7ufoun8IS2rQiw31fMP7su39VT6rrVFWr+2CPmf7s9F6FPMSZe32woQ+O72kSavdvyLw0k87Oz3f1cx9/o7Ne2WlGHpqzSF5fpzGoRKeyQRTG2UdFfXbgq2G/XXyqQ8RZ4we9/vLywRSL5GeuuEfBFrY+yf1u2cgKe7hD5nF2LzEXal54Rf6tagvXeE5tg+t1QYe75PdvXf3BNswenH3vYaGvrBGc7U+urEkmr47f6aehRIMaT80tHiaRep4HyCSay/LR339Eu9wj4JSEf6dPLL6uR2C29vz7i3utdVv6y2Xk/a/wR33YhLLBLfe989WV1Ynv/3RX/XpX/CT9RZOXvvoWXtv3vVX7P7qie9ly/9b/RehF+2CfcsbbTeYbL66+T0qJLLLBNzSkjfXdAhvv/XrukxOvBITD+x71QtdJ2vIwRFl9+hHwTkp05tmYbXjw75JLpf+heR1r4jr09epS3voSRa9ot7wqlzlksvSQtKYg+29+q9V0xOr05Rmyj+PhXXsv26vteI/JDC66vRoteT5NfDd7nmS9/D3xUAAAK4QZuAFfBX4sdsgR7nqa00NtF/9y2DuyvL/y8En4bDHmJKFSAZ917jfDy9feLGgCDhnFr718WTGCxnx2ALgkH69/rdwoULrL57uYNPhrAQ/9ePv521dnYw1zGpa34oaQPNcKKSbwXcn00X5Yw66rY+Sb3b7LUYg6PBLuXJ84tTd0Jj7vs3lpe9e5rvcLP8FoSfdtWkv8vru4895zZb8ZzsXDsRz3/1WmuxHcrn78lYq3+jFd3ryxu5u0zwl5jVnO/FkptO5EYBPl+aqs7zL/vYLN0Pr0CxNI3SzdHe13qqrwTl52OG5Y2/BXw5fdsDCTo92Zwc/raR6+XCmWFBDhE/o92W4+7u77gri0LGq2+sglo4r9bSZ4uiNpPwXedcqAhyUrWmz8FXOpubbDhGQxzQKZ7Kb8t1YIpHzT/2pP5f5ROC77zhEv/tIRXqCm4cHI+glw4W9ZoQpLwT/d3d+9h9TNxaukJ0/u+obArB9tFD48qxbVhana9HgjOMur9eK6f9PVCRAZJp9AtzJfa3tCzu93fnYXhFbcoLg49Q7z/v0+01+q00d4gr2d75PSS/frBJIv+7GxBpn5L8ntt7fs4fvRf/J7/5d9An3vd7kI+CYzHP5VM/3fvN/RfZX2/bXXk9Nrcl0Jfo1e6+xuqKTflQi993L/s7HZ4QfuCEcp/u9fKkfUm2r9av5Nu0jtld/W7361LQj6I/4qfxXcfXFXZa9W2uX119OE7vd4SvX1aj2NkzL
965bu/ydFKYt76/wQ7z5UI+CU038uq9P8vVdCSlEu/8mfHtWX091iTO/lDT7aLu764S8mNL/iO5n5P0uINkzexfJ7a+SKmu/txMDfhY/DNntW871q+3fhbUt7yUq93RN3fZFpNwgyT98L5IrPlqt+oJtp9315NUqKVNLkm7lz0Xd4a93fBdAAACtUGbr0pAK+CzxYzOiJ4++35f98XkfLog/vpcdKxeWa9g/i+TQ7qOSjwY8IG5bILuBUAIp/VZm7tXC4WTl/dvDxQneK6rIPlBvLhTapOfOCd7kLNT/4QMX/gUbCheFoqBDZ577x5ePN3axqlrz8v/uqVVTiYJN3M9KpP1btUnyS3tZSjujcuMKv7BYFHd7VRnnTfJvt7gqPw4YJgbcD3LPKNCuQo7abKndIj+tyzn99lqVO/vy/7eQr37xG933Chf/bMKd78rBVFH352CNe3d+ECyby7vKPaokX6Gy7309JrE172gtxktqPhJZf46vLT/ff/7vahLzEvDsiX2ofIfjpy+725JAhbHvx+Wec0P24ul2/Yt5YVdFiRLQ2Fr5I+4bIVE99xlmvq30kr7S3GbyxDkFpaZy7Ou/D0WLYFL6wQVaCO65kPwnOXOwUfWO0//Bbw3BUXMm0MpIjs8KFH8gIt3lW7yxu20aSSwxJ/oEkXbu93q71n/+CaJg+WrMFXvhhfsN+tXBDlVQFG86hcKTqmLPH3j732XpN9m516tuhJE3yhqc47s6CQl93e4SL9PeCQmfrFvloRH8du96Wk1d2IO7+76PMQ6enzWZ17UvZ49E+X/L7ftQkt4sEJHvqaz0dv1rq+s136rSov/pAo7ve/Qj7EFf/ii7tuPzrt1bovdlfbXurd6tWu/r7p/SvCPh7NLMxbyd1lE36zQvtEBVl+W2sve7t2dgm7vit5fb3d9trl30T7/taL7XvXr1ZK9CPgnJXXF6zdN4kIiSa1L6/xC9cnveQn992f30vowl7unyYmN5X+fhHz1+O91t+Zdyy3iycVBmKO8cvQ1FK73k9eu79QSbv70SFHfE7sy3P5/0Jav0/SXWvdVRP11yQQ73eGFieidJk+TDa/8REHNfZPoRCsFUAAAAtpBm89KQCvgr8w7HHJtJf1+l7hje2XB33bD82N/LnPYZL/7YRJlizbRwBImSW/rb+8vvaThQoJ38fWU5d83tf15hFxamvxpsEFsdsuX5j9u5VTokXQfXxCslty6zvd/UWeifk3J+mXl4iaHOZtvr6BRd7u60y+EOGWh23fnlCvmHZ3j9de4JiT7vso12t3Z3yHr37vcwYnj3CRZaTE+be4nbmXbfj/68Ft38/pfL10WS4ZaLmQo/xQoS9E6g+e5Qt8Fe7e9G7xBXn7+4Li5I3lHzjT628vUSkvd+t9/RZzx0w1Lz5PtvyyrSlokKbYUEBl+u3LC7HREO95M5Jgslbft8vur465bpGFA1mB9H04W6DrvBUJKoEQ1mvpv9u+XQuGCFAvGy3vdxlmvvcmZM4a7c/c4Xf7gnhtLSw7oRJHrRoej4J9jrdO09UEqThuGq3Jmz/ddlwl4JO5V6/BTPtvBO+thCVWHrJix00Pct9Y7VaVtngye25X6QK91D4PcY8eB8YaX3/pB05cJHH2P6cE+9zJd2Cw6o2jv9F7obBNgQ/vu3+79VC4kQlIdZ5pBnDdrJoWxWma73vk4R8Nm3fL4r/cIbvd3dLVNbFeyRZZ17yg6X7py1mAvViLvvf8JZhx2gndPZ5BDEPCV97PBALyJVy+fWky3d7vwSb3c232iMQivlKYr5WMv+uEpfvp3V6Ozuu/txF33f3q3etd9WJ9CfRIIt79CL/Ma9+oJCuyFfu16PBGW92NJLaxdOvbSt1fada+717eSmTe9KLT5Pbr3bXuyFXv1aEfFS7+Vq/BER738rBJd93SYIyu/br8YumtvZpSO95Pb/+rfvRy3eTy7TkIIglyb3vlCPhcy5PKgovZYSQmhd5eT1
X+gQne/K7qqE1bVVRSXvzOFPcnL10/Cvr6+kSq/FeyoRHS4/u8MZclfrWoI+7zQyT+v/Vf30X/SJ9WLDgsz4EeAIRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vAAAApBBm+AV8FflHbut/y6uQc+Ll8JP42VpkGPDhJYCEe14fo//GdI3lYDjUlzY0aygoAhHq7fTVvU+sv725CzQv8PmzAUxTD0G6MeVkER9Xt4LKSs9r8vve+tyTHz89b3aNwTfr/6CW8PLs3JneFvCgQh7M+XQrAx1Wx87QpdizvhxV7jS7l6jcTveSuxAj+nx/S3kh/dWPcYXywjE77bnRcg4t9yTBu/4giaWmSWPrfJ+3Sn6KcEK+KGT7beEfCuHhy9tsEu4PO32motHv16fJCfhF/Dv1tevKRehPcKCHcbNvvfu8doc2ji69m+7HUc5qa7xfiVaj21eUpSv17QIyhBx+M1RcZRWrxt93k9psTfxF9jlhLO18ValYIGuE/m3L/6gohmWl/SfIKlh6XCT+Ze9sKXP3t4dYOsBKNJwdxtMRXezJ9Ll+HqdFzr3rfQfyY8bT2l8g+/h8tb6+vwYaJz3OHZx5fGV/e29etdv2rI7/0Jf8EV7/N9Pr3NoZn2EfZqunX26tMW/3sS79lkaUfqW4rlXlXu/WV/girW50RR/deQxP16PHa9671WdLnmJ/XXItQh5M3/gmNI3Y25/Lf2Ccr5dz5qvRXN5eCPe+WT7vHvf6BDe7t2Pq2m117tasaQEl7v29qlr8FF373eEfBPXTk9tt/QJyPd3d3b8mnfuCUt3u//o9YvcEd737/Ge3q5afeCTe/b+16EfBIRVm7vEhPzcv3/1yUXk/p39Ffb0mbu9a5L3+2Xd70spLl+Ev
BERa2urKV33fVFgkMzcw7H3ezlL3/FZf7v0gT73d31C1L12117shHQ+T20+8i2u5t374WSkLq7X31L5NWZeKmEnzr4ZX5CPfWuSyRUifD3xUAAAAq1BmgAV8Ffix3CPz7zX3Q4l8Xy2Qs1VFktI/i9XP0DxW/BJw89mFC/mJI6Gbty/+2GMNQQ1IAZjT1qf7jfgyLDrUl/17QeLYShm4OfDsWlLMJRr54NPD2P1l3ljzZLEj9vhqR0oQ+xNFLa3el7ve9flst+5SvG4wthV/YLAkf1uYRbqaR4Pur7yx5bfdW8/uLBtmvojEmH6SsqBDzv2KWnfvrrIV5c6PUdvff5KUz3l/yvLd1cJeYRLGaV+2Ccglx3dy0f7L9O+r9Zi47Ed5HhO9+m5C/+WCEvLjddvk1qEvManG8r28YR3d3u2727yMtfBbBI+f1Xx5dLBesoNJXaPFk9V99Yy7GxXG0/3jTdbeo/Q2jBc/50emEPElbXQSM7uOpvIe9J7TQlFvguLcu7vmePs7z/yViy/5fCPgpNKx4Oqt9e4dhlWXbiJli/jo/D6LqDST+3Pf3NyP/tSvBDE3885uX3JLSV3T5Pbc/xFkrfuUpV3otH7T/txVPc1Z/7dke/tkp1yeTCS+XfZV3fd/fWhL6XMxnd8w2U7eZXb1vrIIGK3jf1ssEu0/PJ6bfy9r0lYhDohJ2ZV/jrco9N6btfKxvZafdu7lk70eLoi6+qMV766dGwqq6u7OQmX3CJf/f8El5Ye6OvN3VfkX5T7v1RH/WpPXu9fQi/1f0QhH3tPOxJd0TxW7say33fi77HcaErLVAiuX/Qj4J7ZoUy78vwRmurvXsbd/UJ5++9+77+j+sERE7zA1+9pWqjol70T1y8Qt73CPkx7H+iPfs73+WuTXJ701FGyg0NypPWNz/qyl4CD1Pj/pbVrhbbF93e77G+5O/vWvUJay13f0VEu5f9wU3ve/d4PZoYuUpdJLRPpGvafZ0Er3d3vyMnisNeII586pVpkuN9+TBXAAAAC30GaIBXwWeERnNmabRB/zEu4Q5tpYb2Z3tyv/xPnpHflFY5f9S4Y8WZngBi95TfZGBL6M/cFJQnejxvj9u7q/7RSfCBugIrGQ8GkRRzBd+MQW59gjO9TOWduz9ef0WInzeife+JisOD8/cOOlhXyDOG/fG5bRK95w8kt3lqjSakKNKXggbu01Z6vDjHL3vYi/FOHV8E0C/uMLKWj7QcN7l/3lLSf1e52S+wywk9tJt/fd+tU+T1yVeta5bvy+tfCvhQVqicx52myeqngQf1/sNX9nXlr2wp3tMb5OK+h68Q2f228u2U2/V2Ji72d4f6nu6Kgj3fd7M1fVku/q1f1X+ki3E3fdwEX/Wfz71Zowd/tf/BHM39Cb7woIcvcw3r3Sf7vpsnw0KZWqzwU8gVOWNcP/P8YJiHqxlG+gRHnBlzJ0dgm3eYuNhqXZVF79yZ/PLYuKIZKGH5CejmXnAtrZI7lBP+5MfV+bapSoEvO/nGj/P5PXCb23CUgOrIL15l+tz5Q4+0Iwkktw1WPtLmFPJ/7p9932WLK07U4/vT0IWT2/1wpvemU49d5g1tPrT1sRGafl7/7FoEQm7v28+RXhHxJCbQ8m+rGT93vd3d3e9V9l9EyWL3bgiLe79KCLMy+xCHmNWb+3u+tZ1f8WL3RPvqxF77v296giM77dl9tXdb1zbvCK/ZpGU+vzFy99jZS5n9U+771y7ddXbrXdG7vvLe+T7p230Wsnu2790vFYR8E5i+5/L/vL60W3WCO79eqv6ffp8dBHyAQdu9k9tpPpSXe+jLt+1BJvftvnrqEfISbrPwRkdyfarPlVb5Ppet+7BYJ3d7Pe/dGfZFvaqEvBCQnaW1LvIJutWL98nQgkOQ67299JbyFtZQsT1Uvny0tyWL0q3XsvreI9MRC2aCKkF235nkgr2l2nSffDW/3+445b6W+71qIve/J7JZXvDK/8RZhHJc/c
8VGP92w5o4K4AAAAvBBmk9KQCvgr8w7MMu/Lpka80u4X8xLute4K/LIIEN7rtKOh0jP3R5l/fw8V8xU8L3e5cqRev6abOxxndu5XLt7BK8bZ3bZXSosT1Vf5iu5o/ylGmWt0wqvcFgSXI9333ub28eWEln+/yrykXgioFcd7e79P1mu96q2vKqJ/b7077+hBbfd3hR/gsFB9aDlQnS4XnacbveD2xMVu/Db1+iFe39oJb0+MNte9E+n/L3l4JO0YXr8ucNAbUHC6817/rUJ5Y0QK7P0DvJRR7vbucput3d1r8E3cYry4EMpkPpezguYbc+xB4RZ7D7ipA7/BHu96y92S+T3a/LIQwb37PCF8XMHfOrNsicrFNJFQJePZcjQmed/z0X/5IS8Emte28uHbu5+HNp8FcXeGneMmUEdfh377sRRyt/x+cHWv73cEhSr+svu66J3qLK8hIiqQs/y/X8EUqvKe7J6TR1aTYkw0seilgzVifrBMJe8gazdtJmogI93weWta9IEm50dtCHm8N+l/9d7uzXerP7oxee63cJ93u/qvdn95DE6FeT0m6+mUkUyS76s2l639Ai3N7c6WEF7ZTPfW+7u79KlkuxNKdArlIVrfeje826EkXq6Lu8Iv8EZJ/dO+mbdzxT239wTiw2hyX93i6Gt3u+66EsT3edA+qHoVu/d9OW7u8nr/vTU6oFl3vd3e7shF/QolOTpzN/QIpfy5OJffVS8udngnLu+726UEd38u8vljyfxZj/8N6XU3LNPTkO7+kxPLTd7L5G9ioLbG7u73lCPmNGse/lBLvbvd95snSTEmp9331Qn2SiV1iDpJZCL94rgrI93emUNPb9dDSFlh1bMOtXve94S8NEErNu/w377ouq+vr6ol4sECfy+PWviSufBdmIz7lmhgDKbRSBQIwp4IeT2rJEb3d/ZfZfao6dSeviOl1lFPf8EJX0ntTzeFslEjrX/k1ksTdYY8RjS9n8v+Tkpuv7ujfL+P+p08kloxNngsgAAAlZBmmAV8Ffix3DFp7iRLv3L3IV+L2cqBXt/l5dJD5eXO1hfzEyCQzM/BBwR7K5P2jfIIXSMM9dkbSydfLUG5eX2nuw6UlOEHJNHmvc/2NuXqhxyAS/+4RMjv4JtnrRoaXUH3s7BEVbKdsvu+frzeiydXk+/EiWsRzS45/+Upz0CH8d1/Cr9wShKUUrduwmNbuCHf37fuCUTmLF5z9YUVnT8Vd0ngpwVErelfLuTfcEeeFUEK+54eT6//6LedsKv7FDHLGVl87AIX2Sl/3cVl35bqttld++9ZSevZPSy/1g6S8uEy//QIssH633Y824mluXe3e3Su4dsj4EuKnPZBR4/2dyYR3Us37hnq7b/9e/RfX0/Y2SG+R6TPOr8RyP8NyT+CHqRMcu3YtOGOj9LFCby8YId93fP6FcV9FjdXnmgWIG14Oy3XId4T4aZvWNuEm9XRkp/SupijTb5By06Etmq79RZx11vQt3ug01T95I/3+5jXMFP6EvT0SRehLzGz9a7sEtyCb3d7DT/jCu/Gp70333f3+SYWU0/sENivdvpXqjoTSOg936rV7QI77sQh5DTevxPG7jGW/L8n1/4St3zd5031Jk/Wn8Eh93y6P105jvvsbqruipG7J9eX73d/TLk8IeKGPq2n37fSQTEvZ2V+r81ev1frkrd7wn5CT+90XgjKfrk5k9V/TBHvf9L7V61yel/909evQj5CLqsyBJd9p8vetIvenr+FtPsXlsWiXe61hT1eifd/OT31m76Wnda/baIuGCfeJsoh95f5u78mHuoe+MgAAAC1UGaj0pAK+CzzDGdzEr9xdHaivmXl/3Lhnw4aPTJRqp/8Z/L/7gg5Bogqgb34R7vG2d9us7S1/1+EC5cKBMrL9zjd9wVmygVyBs7wf4ykOcPtDecGT3TPzugQlSb0qk/b+/stUqq/hbzDIbtcVKwVl/u3CnOiZCu++Enc0P/pOe2F
MQvIvr/rVru7+G+/ff4dO0NDkLeWu+nt3/Un69wnfZFPuS/uIvmQYSclT+bceOOifVfl+WKny7u7/kzIjpX50i3scJl/8swxoqR+9ui3frpzFfInJ/XbjWHOHmm14z31W+++7LBN3IO5Vh8//l/39+Xv8EN3+hL173HGFbvd8/ne/cE0dzfFoFwl5N1x+hi6+icbLCQkZBTi+7PSoz1lVKsbESLGC2ZKGL9SFye0m59Y7aa/MjfCJ3LrU8JvuXtwp4InjlnanMaUBNtW3een9roKSC7n/touM95P38vhHwQiKd6/CRONje5zb9PbiubIfz5+lv6f0mXdLQ30/WCLOblBvdOY2HZXftoEYm737K133cpO1+EuwVkLzhvL3u97Ls7EnOoob5Q08nu335SO/V1ornWqXWKuGhB1ZcOGfsWX8ntWRNeCIqKWn+DtNX71foTVvaVyEtFZN3T++/J7f5t+RbdPZ+GkrvQuvVQn390vb3wR+XKhH2ImZv8EhbpitzW5Wj67wUd3u9xvXu6BJvfu9X7wSb3fr/X/X2/eE7u7vftVeEfBERSf78VL7vPl/l7vtQQ7v/39tghLd79r2pq76dGirk6E2Jd77rJ6bVUo6CLL+CErOPFjLk+tfMIRX7bQmw24It3uba2lc6+/J7TX+Qjv7Ku8Ee8O757J6afffcsJk+/rwQkJ49TSez1afX19/f3Zrv0nkgjKZu/dydpQsT270t+/v6+17SJe+2nLV28xa6I4ZtayfIoa8Qe9J1pV/4iIKQusYZnt7R39OP7+CuAAAAChUGar0pAK+Cx+4RGc2YzFqjZl5fJ28XxnwqfNKi/5eW4XULhnwibOfZy6MzwEI29LZ54vO55N6X93sIlKNGl7H5x6tdtB83BF2PXLrsRdfhxvpxmC0EXHX2R8t6EoFpVSKVsxckfz2/wRT506prL18gvSqzPndl0Ju+7TvL/vlK93CnmCFSPbGX/2wU807RZd0alWRY1u0CX8v7cg/b3EFRpIbABnf6svv0n9v+FH7ihT27n9yne25YN+CX/u8spzq39693/gjJOHJdtWlv8EZWW9QnthQQ7uSL33s6iymNsLG7ywWY+4Ow2MFjizK0IwxF/Bv/oa7oTBEeG4MJfYye6W/6/GEw7KK6dXDqSjc+vXdx67vywU9cLu5ykOP9IzkXW9P9OE9wnEPsQ+z+723uPsU5h8pLD3N17X+8IFd+93Ivye21+J9uCiG0Xv8qjI/rr6yRw77Q9ZZVr7S66oEl7u/Sgj3vrsTy+X/CS6lBUR73vKyXeu309J5PS10qRa6cJQ0X+vu99kq/iO1cVBEaGLZ+D8SVye5/+rd99avS1wRXd/aq+EVnUeCW7N3LNP1SuSyF5b2Xk9pr8rp2pb2pwurP0tEIid11pL2vWEecEnmn4XZTNItdHYI9K7HTQJLv9r7WqofV927XRXqpa6ct7/Zcv76wR3vaEX8UCQlt92+7CW98n3UmW+lrX/16q169OUr76q39r0I+CQ1a5teJCIIq1pWsn1k19+rReukKhLw7VPH8Tc3J/Yiv++T2W9LqxXHfNHVaI+lsL6os50o/el5PSwq8p0Tydvy9d22CaZvu9mvoXMP33eF8kF0+EzvVmn+9Pv22ZFXSw54ga0twxPT4K4AAAArlBmsAV8FfmHY41WxD8XwxmXd7/NlHnwx5ScufGeH5wab/IzFzlu+jN6ykQK3ejR/cPFD7I51v2BXd3d+jPf/uMNcwi4Gz5fuYTnBbStKr/8FZ7tZqvu5ezTEir3Dk9vdGK/Jy/li/NUh3m8vqS1IW08wsX+9woEOaF3PTHlife5v8E/Rc49OvwQvH+/f+ynZv/R4SnFeQs8aFjyel+Tk3vr3v9Flvf6ovv+J42u+WdoS8xuNGJf/bFEh/fzpF0e+JcLncd7f500b76PRa6zc10T6yF1f9aqsT2zhecOS7rBLd+5Y/bbFpJehRbZYdMJe7lyr3e39HOv/2gR
yihEzBofKH0uyghEmqcW3/RmKp9eWCEt7uaTcT8kEplrOzPKxraTpAm7G7lFX6668o1uGnsI+CQRN/t72Nu73DyLRuRGlm5z7j6YfvrGtF/v0QEpEcn0Arnj4Rth66syqxr17gkO5l7/EvyvdL3eSNif/RJMOwZnV3HiMbjerMv46TO9rX5jy+87DcIeKDir7pdwTSsJ++fe/rJ7/+i9XgmsZj/u7VX+Ihu+6+mRCX+q4UT9fE8EO9317QIbvexCPjhBGIzSN97k3f9eYbe+j6sfXLrRMOv6LvfQki9k9V/L0l4q9xuj7t6JBJd79CHkxq979xAhkWPe/qCQ/FdOMv/9VgkK79fgkvd+6sEV3e3WrdiUve6vtLl7LJ3fmX4Jd7ve8J9gm3Sn93YcvrJd/WrdfWbGNfrMbmp19tH7uy3fupSol3supN7hHwRGrX6yf1/o/6rV/Jqvr7q9Xe7YS9E+XUsuun1d5x/osWd73d9+Iwr5hF736YIz7r2rxNcuurE171Nd3e9pEV2X+pSSRkx/kwu/FsXN17pVfXQsr73vspO6honp5n+IKa8xJ9+SIw7H2RDji/V1cdFw98bAAACtkGa4BXwV+Cgdw8Ny1ZweUTJbl2DmfX+X/fE+XEr/i+yLPnoF/MRJIhmgq9wUQIPj35v7bRYdvea2U04kDuNKBOvdr/rdK2EW+79TgXf7z/fuEDZ+VR4Q8eyNqZk6tyywEb13/1/1vXhIuNyeXPxZSZL+acK+HQlNF3jauXoLicgrkPh7bkiV6/eXh61v733CT5xs91/Z2JO9zFh0l3+T3/8vPtE9f93e9PyxFN8v9/ibLe7wr4RFNyhp05EN8CTb6pL+Vu322CMrU73q355ShPh9v3l/+XL/ln+4YNmmYMkJWeuHvv611rJ75f+98Ehb36E9sKCHc/c1+0ve5/c/u1i2T2kjtJ8Ja3LKhHjdmI4JRNHedYIf47Mv/bYKDZfac5G8Lf6KlWxcUST8/9Hh2yfKGoZ7LvnYMDdcPcpn/8O+GYulXmsp96rGw5t/bVl9Vwm9uhm8Mo7GC9sjHzzt7nZf76wVWzrtSlMA+EXFK7ixyJlfLpwmUj+2dd+7UbBIaS35d4Kz5npoLLg1pYe6SHuO4JJk93N7qIve5QK/akmiqbyKbyr3eLQREDDx/yXgvjt/yi3u+jwRXu/+ipdZPVV+oS8FZrcfpt3Pm9daxaX+/onqmU8fqy73pcyyfd19sgMe9+k3yH3fWW99KmSiOV0UeykZhHxwQu7uT77eqY1Hv9Eg7chnf1XYmCE9m/K7yEvfbJa2qEbvc/wlzgku8Z7vwW3fPmX6q1rq+7RWyeqn76s6P6ykffXbTegQ7u7oR8EVtY9Sb2JFE27u974qgQle3ccny8v/pq9v16rr7Gyld9E9Ul+QEV33ASooRYy7+Sit2NKjv3SvdF9fZff2kIu/d+0l6E1iRujWPJ9uqLu601Rve7ucfu3sQTyOFV15PSrf+z6r9PyUd619OGPkXLIQru7gSIAAANIQZsAFfBZ4sZtmwpFVKpWVfuHLmOyFXP6lr8pLumGfBJeYFATf/Bk/BBx0NLbIEzGvHyWrCVgRM639162/ymp715ekyysWQhy+bCgB69zX37Ys7lzzGxXsTBJTeQPIxUn79ZYIt3cYpl/3wrzTAl3/bPaNXlv4V8UO4z1qE4fSXLl/eysKXKj5b+d7vx9f9l9/LBLf2dqi+QfvX+8ivospQg8zH+0Gs4duvq6emqK6L/168eQhS+fuFC/+WER1ryiPkufXfL7/Qy8Yafcjr0zsOG84/Xui90Vi9ylm/DpdV1dFhG93KSe7pZEp/5oJN7v+Et3vThTbChBW7hGbXx9OFt/ur4bTF2c/ZZe0NtnVjwq4ozFOtt1cHBCPfRjZ7Ne3oOpUbqTgYRRiMP6XxB07GdzCz/IgYEaDqvSWSi3jqR+kgkNd+kWOlv/hEYdQYczN6ETvcv+9BScLphy+1NDk
C/l6mf28utB60LRs9sZx4SFHIrdEvxsHW6AEV66916S9b/TQlEv01wu8tKE9w7t32oRcm8CX+f7qX2W2Fqq/orDxPvIC3C2a8DrOvVVGPPX5f/fJ9K7f+4UO++a44Ei4XjKdZBdCZc00+X7d8E+R0xV8pU67/gl3d53NtDUWmxasUIKZt/pkHyL6Fnd93wj7sott/BhL4JyBqEGnsPDwHYkp0/rlcEvhN6Z3vXTe/d5F79Sb36l7vWbiyve8wfq8QYJdTfHRPv15bPd/a+wT+fv37e00Ce2Vd7vdMwh1vc0SKdy5uxnQP974Ij3fr3BCLn3KmkukSDJ+l/aNlT9Wd79H9eT3XS/sW0SqqlryS33CPgrMnWx5WKdW8d+CE7r15K1T22UEN9+6O1r3Md9/gkIbf/s8V3d3fS4pKxp+9X/Z0vbdXV/2Pz+EfBEEoce3DbyvBSXlxuW8VxXSho7Wt5Eu/VXPXovpzG2yx2LRD7usn2hXl736wU3f3u+7gI+Q1NV7Yvdb38i8tCW2lnf6t1L19ZLv19WJ1pgk7vXSM8nwj6otfwUmTjVN9vDX3+XQvVX4z603pJ3jYdd5B0Kk/r8TKXdu/wne/d1l+Rae7VyqryYV8ENa6pb9V9essRXvJhzyFU5zBZAAAACl0GbIBXwV+Ydy4t1Sj8EmkW2OM34vgt+YP5qQyX/2wwSkMxd5trjLKr79sExbIPaSsT69JBJDs7EGxDEAi3fTFz+1LLE7vBfnMW9O63xB73vffWI2y/y/ua93rdwnu9wh8D/hXzDqmQfwWT7RBhf549O0UTNv5AZft3oFPL+YfdjQp7ft0WC2OOf80cP0hJbzFcbDr3edgjjyd8qdFis4dvPnVXglvdq9yp0fCy+woKufY/PIkr33D29Nvdx1/McOc80+ozfiTuiu9/rJ68r+be+mwRzDz/XV9OsWT1T9zratFm6afL31ghzsP1CZffpwUiHdw/jrvNuRW+9xL/+N50oZitGiz3nTY2Bqdd+D7s3y/tPkEj/frv+mJRXbViaEPpNcUZxmwgLmDN7o1wS4Zl93u+18TfUtU3fjw3+s0g17wntjhgh7l/MpnGXuVeT0ki/cPd2iFiD7sKsJB+dl8NtQ90ePmn5PX/UJlPP41P3TQueQ0vjoWu4s7WYPBH0vVPtwV3vuntO7nioJLG92ye3X5K1r3QiVVoXbsWghfe993CPivN5v/Zlx73xmN093u7snv9L2un33/vd9+/vk9NrLayEh5SSHe9lI51X/75EWx7WvuP8hqHOp9CZGcd35fW+WrFlqjvJ9f/LY3S28E57b+Xt9iTl9+fwiX5fKUwi90T1yL6Jvf1+CEp2P87G1fvX9Udaa7Vjb9b29a0rZGCK+7t6wj0CMy1+/CJd3P7uX39H1dgi3u/fVjaJB4iXlHnT6khaqR28lX7lVvWvX/tdi33ICS68um/3mbhTMvL6+tFSp5P7EYbiXetF7V5RCJ0LeEuXeXPJlJ9JNfqzJ7afVCJbvhjy3uXLd1sv/mo9aTzilOkCRAAAAvpBm0AV8Ffix2kG9X2SUv67ZeQ2dvzVNb3e/DPgkJKPHk4+VPwjzRIfDv55dqB4f5+NnGX/bwgVAka+Noj33w+3x17hA2mV724Q9/i9SEQkgyMn7e5+WOkx2JVH3IfLN6/Ll/X5e7/Lz/8scjtfkwdCpf3fCgQ5u/u9vekYs4v8E8Y3eWN7v7QISkc0NbtRPVL9z+pm39l/MtKi/EZcgIjvGph2gtT8sqSdmE39gnHCRx3cVu4eSGMfuXHUCn9HYWO5JvlY65cb+7xbBBdz/wiX6Oc/KSw7Rt/vutete3+CTLw4kn5fmIHJV1//TX6L0J7Y0wl4rFZDRYOEiF+Se9tv3epqBya+55/aCOG4eTzc6KCaUP+uP0345pfECUl/78EP5rEf9QSGKSuG7Sw+xI0ELx/hfd9+6IlVaZKIkVKfhMlNLIimQWni0FJaEF2aoWFw82n9z
4S+D3lzDLz6TfDujhuS15S2gMCb+IuVtDzQj7APKZT/6enl4kXu824SXRYwZdz9z+xLruO1d+/fSleFNLew7cEuQsOaS/QoQ3/h7hMpkvebNLXEw97pxsY/IHu6xjsb71i0n4JDZx8wLmFN3RBLv7Vbf75NkvcI+bu3XKWGb3dfdO37W50ezl7b9WY276rqzbu+qRZeKhMQ+93p/J2VM7t+ipWOiV78otqnCHlCVtW67iwQn3evzC73pVrq2M5vrMW99OEi7vy9LXBGaUNXervp9WvtSvVkI+PNPsu5/rVej97gj3fl1XVAiLd3+dYIe7v2Pr/o/rJu/eS9+l2k3gkvfW3fBHd7+Qj4IiVr/8EWGh7Hn1tlvz/sWXbeX1orHmfe7680xs16X+9F7qil3fSW0kzMFuWne8YBHwTiD6ra1rL2wmS93V/NBIVdX7kR26lV/pX6fvIV969QREnztRPav6rzFXX0+73ycJdAlMuTl67aQveaiwle9yEvyI96o2FchVlr5Ou6zXf8oISOluhjJeP4/6uXSVWi90pvFfJ74e+Hvi4AAAMNQZtvSkAr4LPFDMttHFr5f/cXBm9wysv06/rxfGIqCXx23C/mNjgUXD8v/uM8IfS8Ej5ZBmDa7SUFnnrtotOFZf38eWc45E4kNwillJE271u40meB0V3PtHVHwzK+Tk1IP9TVwj1CWXb9wlpFJZ7kDSU5PWj/96ljy55eS51qXfL/1gjlln6UZf98s+XcKeYdDsSBZJ7vZf+2xud5TvySIfbR3qW+lsH7fP6vL7e+C2POnu7vb8Ee48XX2T7/u/wUFMJon3dyppM8sEWdjSrZ5i3ekp7iSPR+dj8UW97v+Xtm4TftCggf277ui/u1o+vo3GwpLQsvwRd3qiek2Lpags4QaC355mrT9X/snv/S+4I+UNPqFNscZ3cO4W58727y/u+M0Jw3LknAm94QtrRWoTsDn8eJCkM/yuOul5B9uqxrKa7yXvjyV+wYSX+iNFT0yV/4m1bOvcdk0tJoovdwm98ICne7jLdWoLp1M66x+rE8wRXC7aEUjHCkzbnvM9HqwU+CQs7HXWCM27919iWi10f19ZjYczVqX7E7ve0Eb77vL7hLcKXepgl2nubuHI+7+7w3QSmq8WW2+96XxJnmNve+6ye09dm6XdHi8VFCJnEC6qYe+2iaDMYJt06j/f+0rkr0VyEu2I0zs3rkO7f5WLJf5aJN3+TrJ6J/f57I++nCV7y5evBDu/Ci/vyO9/2cn4Q5PYkEohM/vu/dDa93e8z9qJerhNxfRbV73vXT9lr2nyQQ3375FqEfRLfKCOT+bdFMouf3mf/WX3J8pXf2W977urPFm5VGH+3qhNH7vXqyVeEfFGtWzf/fmStvfR36F9Ner+9ZK/8nqjEyT74U8EM37Kziev0e6G+s18l6oXchJBJtE2+S+m+5AQlfd9vxCJHhXie63yEouVV9ZC3vr6/MdCO7mhTyCa1Rf/2afe38J3vxlbp9MEYt9xSniLn/1irNK1w3pU4iMQK92nS2dz20Lh03Q6YQCa3fkyi/ppncP/nwyXzJvyCndVrfdb+KiLt5Ot7y+X+97w98bAAAAs9Bm4AV8FfmHaTrcJ8da6v/NzTy+uXi47GXszX4Y8xNwl4Ok/BBeP6ZZHnwnzWBv0ktNk5eO/+X99scVzj/jNGC6v+Ezbw3F23YQ366Tc7BHHnrqn7plf4844R/dqp/8v/qEtq5YP6LC84y9j3K6C2lX5ff8IQusnTW6dvb3LPC3hEIDee1GZophE85T5Tt1uWCXcHIKr0f39wQlL7Z33Cepb3vJ67r6uXuuoU82Vs3l/8sKE53spB/7IWqYLe3Zbg3Cse6dSfbWe2yhS9mms3ndGLG7LdDSx79/H4kuCZ8xZr0jp6SysJ0rtXIDhAz+3di293yfv7WJvedWYWpL12fgszuePLYdlBkjvA+LJ6vSQlFhHc/tt72G5O369MJ733e9
rBHPveoT3GCj3u4bedh9OkDG+fvb3qs7L4YyfFCWz6cNfPX5iGBNxvGiaoshcNXT+xvJ9Pl04JTN5eHZQuz8N1bQJof7nTEjovyQ+R/dJuReEXv1hN/MsvbCl3c/c4fry+3c/9hSW4X8BF7yvA7aug/4od7j/37YKTn3+98Idk+7FtCO76ovvRe09ikC0z5T274arLCYvP4y4P8lf9iVCXgjMnv34KbuX297u/q3IJvf1244zt73vvr69r2CXxl03exsFr7Xkgiu/W3F8Ed3fqEfEmTp0z8qO6+JBCfd+9owvmbo5URu1FiL3e/WCUtK7v11r3WCLjr3/ujkBdJ/e/VTa58v7VJwj44QT/lYy/X94sS97vb25r3yftk/ghmr3pcuv+xdXk1BFvet9uvQj6/rUt792tasUte1Vll3u3+idq0hq/Ra6O65F9CXixCdb3bV6Ev4iyu/qlbouyftf4IcIfb/btLInpegREu9/oXu7vvuaE/IIVdfWI9daRRLu/piCZgeQPucf/tHbJ+/vULF+XCFV7psEnd8uxO9V6ifX+Iwz8mTXw98PfFQAAAA2VBm6AV8FflHcMMny90fyykrBh10qvLDGUkVD5p+7kzwz4cJjpZWZON9/XuEPD114CHWM2+NR9wIUOP3mvMnY91u4fKF3ftQ8ovSTON7/Hu535Y425TQ/GdoGoFp4w/pPp8srcsecO0vk+1Xc8h9Vl/ovJuf9bTglufv2rnFl/34V8wyHljjYnNkVe4UJIjhhPa+E2fl3d3uRsH4LZg4/KcEdyb3bTR2eCModrvfLTdNlK9905Yjkbc84UT9UX2ea+99YjSe7yofkveFPGCJN87zh9E2gn/hPYcle22H92g6f/0U52qapze3uvDp3nIMw+7+5Hr565q8kMU5+bcE3y+g0PF1/jP/i99mOdxGQJ16lTSeJqcXWNkJW9mNkHmstqptwzN8D1qN3081L8sVLh0Mq8sn79oI3u+HZJXQYb3Chfb9oFIx3dt63vc0WOjxlzstuAx2J8iSFLOYnp0uoKRIGdla9eLzmDoZw93O13dPVje/fSbi4RI5HkDXMzfOrljJ6bTnTWFNExAcUeWtrac8svmTHFxbvEoKSpysCIafnHb5w0EPtr+8bPxFQJW4LO74T8EuZtN79ZP692UFM/uXEw7Rrcgd+z9+sFxL1gcWj5geEvFtvb61oEZ1La62QzZpWnZrb92Xu/LR/i6U9a6wTTBm939aTUqIIe+vcFAt7Jw/F1XvfrX/UgLL3z5299daJKEewVXdM/bX8/rdFWCmm/3GTh3cv3ne5OzfqYS+y0d9Y8Q6HvkCkv9bkv++T+l89X6PBKQmmHDA3d/K9Uf70gT3d97xQk67Bdmve6TPRQkd72q9hF+RIU+/tF76V9/rbrLcoa9dWLlu/shDcv6aNy0hHwXCI3Tt3sOyotUXyqloQW93fk98aX/8TDd0mP3vovsTBF5Y28lZqoq6rokm9wlWvfsmm+joExc/iuKPf0gRXv7ouqLLu9UeYmmb9XvfVItdlKYrv6lNe724kmKuX3u8JeQY9/TMXde6Eopaonbl8ircxqv5KtT/hLSDunJ4q+Jseb45pujr/el1XXLWTvfBNyaQO5hYw6x0WPO5hLvd7+T3wrkgo7vbLO61+CLe8XW7363d/666KmR39iUCK7u/ZPTT+pgQ3dp5fIzjGfcLrr6ybr19fS+b6cN+SrX+HvjYCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8AAACikGbz0pAK+CvzDs1pfN40Wnza3vfNx9xq+Xbthj0RKpf/sXLJmSb3FH6nRNn/wqWcCpsRFPp7v8v3vbLCBnw3EZ3Dt0eatetoFrbUvD8tD7cJHqfpy5yz3P6k2vxdN9ByxeX/6fhpfF8s/mMliFPFDOE3xWhePCdl/u3GRlewuE2LpvQJElJqtwf2n3eT3Utz6D2+4KvRuC/7vhzV3jHf4jaH97B3Dy6buEi3Ve8od+EtBFQQzLnE+qxMEk4KvPayfy96tAolg/d3On5CnFQy9/vy5CfmCXDtya2i9deX/dr6mX8KPfGT93HDt7XrvSdCvf42eJvDckp2WFlWuB0q2EurN6zQd7qrO9N06F5eXVDZBAYSQTW5/4Kq0P3l3KHHx9FTaP9nhaHZCStTCKKt35lqyVxqL/b1u88cv/0hMW9qEYS3CAq7u94ZXZ/ZWPu42E/laj+j7twTPme3qj33fq4TPrW4FvoXR2NJ5K17rlk9JxtJREwjmXk9tPxK0Jh96tk/c9vk9fFYRXtokPoWXrYkyG/RbEG/+W7339t9/ViIJd55JWkLFfz56oWV77v1XeCPu/a3ZAQ7vy+IhB7khARd3n5WbyfftXZ6K/rV6t5tWR7oU/5ei17t4R5fcEd74tenW4JC3v2/Wn+C7d77v0PQru976Um79JLVVYKr33u7
/Qn4I5/dL1LLl3kfaW/onqlquSjw3CPKLBEtc3/L8lb/oW/m++T21VeYEMmEvfqXqQEt3e93fsvVkxz0ncK/Wb4nbTo0cbhD7FDa8vbOUD2uMoS4WQsKv9S3pIneXV/kk7v9a9MFG97veF/IQmXrTQmcon0v/VFJuk0oaW++XC5xERu3NyD2CyAAAAC4EGb4BXwV+LHaZ55ce/el7ltb/Ndo1frC/mJSO0gj7/+X/3CXKcJMgYTB7BfaC5f3TLBcUf5XVUw8O08zUpl/XsWbngnQu+dDlh2y35Fxubvcotyw/2eCw7W9OlV3Ft+L7tWpRKXy/wlfV8+5O7hbwRhDYYf/d7q47gmKHZOOsd2H0cv39x8JFzv3yh+qy2R9+pd5/L7teLLy/CDbfv39y8j3Cpf3yx4zmofu2DYPuMyvbvZyDmx0VorjTuHoaXt9Ysstmj7vyvW7audHqzr6oQR7+dulhTbChnd2J6s/t3e/LdbjsvoR/0KQWgjDY9e6xBxl8dhJ3cZeN2tKIsg3evzPyr/Ve8khX32JWrJzGOsEHv5Pn7OgpnCDG8yHq+X7gxUj/Lpvk+sHw8NY6+npvblZpXwQ7lEdii+I/wluNu5facPM5u7vBzL7Zp3/WHtxsBP3dvjQBkD5ipssH1LY9Z2JHo4R8/5oycfaFEn+NciN470ntpFe7l21xmob+k2yoLykg1mA8cZK/WMzr/00NtAr8cL+3p3jLq1ZdcXnSvvVsEZRmB/7txJDBHPGhnl23TYwnbeE9yWF8m5So2V+2twRluMF9kHeog77u7unl8JOvBGIL/XVgj3e+6N9N2+2mvr7Kyn5XyenRLiujMOK38JuuiXu6oq0r319deYpzz/pe7F7fJwj2ExBmH8/35Cst1qmwTFSvPHudfVgk3no6vBdd+WX/osEO73+wR336XkhDoEVs363tGsl73nTghvferEwWn3d7623v6ve+tX6zGxi31+6P2tZCc//d9+Srwj4JDVq3qiMon1T1/X0V9aFsqvyVb0hRHf3Tv8Ee72rqEfIWGan+CU0k9b3ip/1V9S+T3fYlEPu/TBCTKCxkDLq8fvfd3vCyXFi+tF7q0U9eqJ7ovJ66/gt3vOSf5lyYWxSZpf1Qniy20nW2lJ+1/oqda+/tp3vqnS7JhfyR7p4MIAAAJXQZoAFfBZ4sZUCTfK2oCKT5c04vMTHWuUi+vLzAwZSQx4s3NG4ev1sdL/7hDw9mYsV2ihal+G0YguvwkWaBxvyuI+lfGG/Gg/stjo2Xtt+y3DstswOVeeCOU3eUlBr3Fnu73PPvcs29Kvwl58ZK71+EN3l97mFXv5eGHULagqX93cIhC3yOG97z59Ngik3e4qssWUPr5P7v3BKU8PO9OqabL6PBR59t2k6RPr/oTd3e97rkfmiFfCIoqMxaHl2sR2yTpE05f98TuY3BDzrvL61ylDc9u/8JwxJJ/HbY+/Tf6v2Lk7vV9C74K+WggrVuHqfu867UEcsYZFT7b02CGfnY/wmX9tvGCuEVkw6N8mo3vz8//mjYWHeVy+v4ZOGIsrisRtf/L7e9gkmGiqvpLf8xMNxfRkKSKncvd+t4blI++E3vYK77u+0736KxVzFx4L3tmX63USXCfhvr2f/KS91qj/Fbutafy3fk97pq7IbCCqXk9J7rNye9eTKhbK2h5rvvdy/CPaJ267Ne83sS78n6Xn5CXoej9NVuRA77SaXrW/SIbu/iI/xROX3l9+46dvP3fp3CIzeTvF9bdLVzF3clnWT38R3vTEZt+7/KdO8IeCUZemtO/zhItXrXo/6BIV32PdXqxpXd9P6UqvsEO76uxNWAU1BHLl590coJL7kTJ6/+vXYur5fd9UTt6llvNcm2Cwt3u/d3aEn5Qrr+Sy6rIW7y1EQmusgyNWPfEdWTKt16KQw19JWTCnPr9Uq3onSUfraFJEryVrbfwxko/WvJN4hzkcCRAAAAL3QZogFfBX5h2CX7my/+WLs
rmGh3K/8vlz5fG4gMF/9wiTHyeihppwaOGtJe11f+4woJ3ZNT5/10929tzB68uVu4ICNbleErlpmLpNDQa2h5/lu/YmFrvcOs0WYZ3xh7SsJNqv9OqSp1L+9N7+iluXX78V0atEC64V8w6BMf8/Ll7kl/u3BThluBuJ1mTq7cLHtXy2xwx3HbfhlbPzzvLncEuNr4FcwL/6mPcMJ4DunspI2Lf6aVhrsvxGWJkO8qGFPFjmU85pHdKEr22xG3OEuk9P27y+tH/7mI7AjAVidb+9/9bQm9toOkEvd3ewgeJW7uxNhu41f6rKw9KkQsyBFXtmwKs0edpvf7pzwYFGBQdhvd6+OL3JWi/00e5CZA+RVJ6Sn+SQk/KHrk9JLV/f4IcWMqO2KXyFGuVBhJ74KhUfEw+/m5fhH9U47CktwndSj+YfMDfTlvvrBR3e52Lrpz8L2l+T9f3RaZaLWvJcy/tJ8eaYXXPj30Z/J6pf4QLhJ6Nz4yeLr730XeMwh4dtR65fGljP9vrJDaWmCeDHhFrTxv9z9x0hc6jyRd+vs/ryekn+J6aLwQnd+F26NLbXQISQbmdSGZhs6Eneze9/L3WcwIr7sbyxtYoS1MR249c+CE7y/FVZCXfzQR3u7HX19fbWqczBGXd2GxaBde7vv2r5AS3d93udND827uft7254BHsQKHqR/TbVFo7dXVUyvfa2RCbv3vqVbdaJ7u+3Xu2kXtuortOqLu8J54JSPe771VgqK77v3v2T0m7o9wle+7vs8m95PeupJQQ3u6A/TgkNq/ZPbS6Vy81kAlpEpdvpNl/aRX64Q8NE0Su/xD0s2gT7u7q99/ZRObF69tibT7vfmPWSqdV8gIibu9JqWl7oQwQ5/9uvhLwTEbXm+dKy2er7w3Xv6rrtNiaQQNmIh6CYvyDyW/JYkkd68khs/hfO1rr7LRX70U926M16sofupu4WW+QQ2ver9Eo/d/evXZMOdO7PD3xsAAADLUGaQBXwWeKGZsWUmU4679xdLDLTIZNa8N5iJSmv4/rhjxZuMypoBGa88VPll/CHaGip03xpp1+ci6beZf3srCBc8pbvHzoxvKcpfuNJuHERh+94Gvoxoq7quX/XuEu42CQoIfzsxgbP3vhktndvls96eWViqTu58sVvL/l0CXyS5cRavlpT5CnmGWiVaI37jfh25cjmOku67zzIJdRyS8R0KXLfqd9mRKapUp8/rdxt97cdClnKAy1GuGlrQFPd/YYW8/63UFNEni7ysr6BkKXkXfb3KXhpch8Fdh2nK8y+OBZ30pzdZ47nIvLC+R59l91fEc27kQeizdN5f9oveWo8rv7mPz5/E8saXc7azhN+4RCB/bhN9wtOf7925977fR2y3LN9av17y+svSVWvZPdPJ30lRe1XWUJvewoZ3d3rDeX3MIn93MWeG1u4LreUsQagLZSlzxfiChWB7rw58x6cf2eCTMrkRP5Jb1KTqxvvFZzsPyVK6/k7SewpbBNpPy5fj1HEpC5RTvCbTfb5PaVzXwQ3bCDf1/nuF8IT8EetYsv7+FLnX3/Ovd9ex109TbnNtCWWPIQPovQHY2K8w+BX9gdR2VbxJcwcduvOXvX24JM9Huu8EJMt+G7fBae93v7uzXf0T2ePNyVc7/w03RunQpVn+WCQW98XY0iJ3tAk7u8JLdsSQVu+27336SW/FLWuYrucd5f9cEeX/fmK9+tG7L4rq5CH/3+jsW3OnBH5fBCL20hIh3vd07+wRnTb4evIVFy2nl/kJl1MldF/1wTFve79/cEN969MEV93y/1kwlt6XcEd3c1+PwXblL73seoJL3t7l7v829/QJN3/9wXXt3e8fo8Efd++gR73r1WtqdZd7+glvd9wi/IRE/Xgjve1eCMt31vLwnd+9+7u+kupjbpawjvfOrMPXOP6L/X9JELe4T7Qjqy/wSDWn4K933110CGky329LJ+vnX+Ccjv7vXkcJ0xfiP82mQmZ+/oSc
kvkv68VYsn4Vfb9N+Tr9CJqp0TtbSYId79l9+0RehfohH3l/+esnSVCH+LO7/Lmv5Ib8kur+iSXeHvjYAAAAx9BmmAV8Ffix3Go7Usm4vLVevf8vUdIYY8xFO8wo9iX/3CPaOZLEpZcEozdjnX9748oF317/DzH6T8vvluESY2fc4Wfy9pqfctwS+H9qvSc4n9/Ly+S9KyrLBfhOnPLycL7jxnZm2BN/z//un17YjjfW+il/Tr9/11eXCvjDcrXvSD01nXlFVAu7XVeJKPlTmlgsBeXynHmfQr3oJHdjuGEmGrPffwVwTew/PvcOy2ngJN+vPDfgj8v3567GxfLco5fvKV714JzcJf9O0Msk6Lo1+EC7EBrhOffuz0dMj715YJN76hLzEvTvbaGkd3cV2Aj6m33uET7U98K9fl2uisKQ+u77mUsf8rBR24li24ktsrQHgl0d2++nBdGRP3Jd32snrm/5Prz9RXP2GO0/SaiY3DUPaGnf6sLzlpd6tvgqUs5VMzZ/8FnHzz3HF84MGoV6Fh8E2dc87t9/CqCkqBIJd8qQm976yDCj4ZfAY8TGoB24bKHpVvY0KFcfRP6t3BFtlBTj2V79u8emPJ7v12Cry3KfO3saYcgisMn34v4I+NaF1sZ7pwSFe/vKiUN+Xy/SRGhHyyeTd14JczG3fLq+36Inc537hAsw6+7wm5LG+/VEr8ERd3/0SCQkj793Q4lqCT5LBK+r/5Y/94IToosF9jrBHvfuk1c3+vQj6EM35YJjzd+927wR8/14glUXXgi83fr6/NBYV733fdtL6xdYIr3v6QS3fe4Q8mN5f4QEVl6+di+uiiSXV90MViD932n6audiSq3pu9+i1etbota6wR73fa/3wgT6+J8Emq7b8h95SOCi+96T96uWT0v6y3v114Qu/sQXtXz1ln/2UmT9LXJ3a4Id7shHzErbl98vZK16lKu+mTJ6TZZcvepBb7on1XX9UCQnJDtJkK4vjTP5WPgj3f/f4kt3d3wlogJhVazeuzpj8vk4rs53Xsv65pS8/daN20kVMPeX4yX180yv7Gex+qkFc2bv5MK87O7+itG6ie6hG/os5eSjVuvyeuX/C2ZEuuq0WrXkXyeTDNiJJaP7ySZ7xHZXLaeH/jYAAAFAEGagBXwWF93WxYySxnJTqzf/LvD1K/KR7lj8vd/qVIX8MeOlPaDSoXOZEEDzLxzo5f/bG+K5cDF7vDU88IH+7DfTB3IF9VezRbiQxwPfE6+2f3l/e3BbLB+3t0vuFKZaf4CjWf0k41u5tuOXfcA3dnPBnYXoRatFlI8EX+SHKa8sx5/Sr8J5O+fMufCd96NziPxnTH+1RFrb1s2+EbWeYV8wyZIaSES65f7bLG6meMTKTmXU+K88HjSz3p1AnV/dv2I5T6LBNrvmD9oHxbqnHw1DfPu+78nrVvuET3bu9oiW9LRYjvd3MdvWp51J+kq+EJzzkFreYzy/Vfk/VrVSz0u98mS+4Uf0FBnLJ2sXt63DKUj24rdxO7CPnX8+n8v/X+zwxJ/9i35NVk7vrCO1DCSHLK8SDv94H5S+WLkL2zLbvL8n6yhLwRZpEbde2NMhFZdnc43xCXXze9uD9fzxfdhuk9/J/S+dh7Xwm6Q9L3UVtVc5YI+EtJR3cvqpv8cUfL0lRbopHOWslCfMfMZ7a/LptSwUx11Ki97J8X970q1iYeR+46HLaR/Z2n9YSp4qR/XTQm4KjPqVKVyd8wP0fPPsvqtKMjEObLD7tQet0OR/gIKvrnzeaci7j3hpFDn87Q1c34JvDlvXRUN+HFxH0O3DqcO+EO5oci9PeGXxwQA9yrNf+2oRsyn0uSCGHpb0JW7oJBCtdGdyl5ZT3maEtwVjnt3d3d9jd0Vgq4Tb25R/w3IQgPHmbEFHh5Nw9e5bf9P4Jy7kF+V790yR+US4eitFbzexfL/vicZdfvbk9WrsWtkT9HX+hN8vja/CXghM87ct72EreP0u
++u/Ene7u/qkSq1WvUxHvprch33+CG99fgh7ux9Aipvesv++UXnUR/YoVxlZz7/Ekbq0z4rOddtaxYKjn8+6TuX3v9r3R4IxO7kusUaZUk4qlhl+q9j4+8LcKPNjcV9O7+x/vCRbbgsN13264JDOHUFR33RYUu9933vnzBv3IW99HRt3f0Lu9DywZ9sX3efL9XOonXhDyDDs5/8vN95TOQS7+rBbau77ue0CS++Porc/v39E9Eq3Wa7+nBDfftr69qmywQ7vSLpC777vyYQ8FU/XL20Xm/63BHeK7a/BLlRv3ex05b32lr4qCHd9VXl8y27Ly4Ht4fCV3d93WS+7rEYQ8FWRum2OV61iXfRmCjEPeFLsV68kKXFbu7+7d727F2Lnh3RkaCq9EX3vukCGyfVCEvQ+XCs2y3Fe7v7HwpNp/8b9bu583emHuqO9iZc+P/HR/2+W8o4HTYPSV7a3U6Pcv72RhMm5c7uYS6YKJljxXplknvgHcxFTSnajrlV3xvrPaF7+qKL6EvBCEFU8Zxb0rCc3t3a71WrJkyao6R+hTyHXX4Q59kwXkxcmxOlGwgfNms/N1exfwmIl7e++ixBbRYe7LooK93ek/LkdjvqKOln954V8gQEcBDr6P+u0eK31jLnWpJw09u997+ghuX3bu5Ne/Vx+l3CviNarVfgpJCrl0r86mnd3zG/xhUhDKXC3txJq6S/XHvMntV/iekk9fNRzpk/vLJoVp3vL6SWhl98Z8e+k27I/+OhVpR2PefhR5uO/TZw8uo94wv4gRH2XYTD33+I7R8t+Fx//iMSVndrW+7n01uu6UUUQUtHGeS3TQPSw8n8sRNp4i2+L8EMB9fiY/BVAAAEkUGar0pAK+CvzDsd9FL+vhEvHd3DcU78xl8e5STkr6/Neif5ePYVSfBHxo4ZkhjykqHpqNvuHihOvJXhjN0GQKRRHFmxumnHT1Pn/w8SmicwGPvR8PK0iuWv4e+itM8sIT3y4Vi2a9/16YRK7V3nnz/oTyfpdUoIc6otPQduHlmdb7t+Rq/msla9Sz90yxCnmHRvK/b9x2WW7s9CbYmhk7rdwTylSJ85dn9v3BNe+p7vT1KhZZA8+SPdfu77wq/sEBg+LhNYu1JI7+xFBH4xuK224unL/u4Rx/r+4xWwu5199lYKy8w+T8vPT8222rCW8hwk96ySxfDN/N4+cvvs8vc0d/5vK32HMwfhFy0haxf5Pdsv3LUj/Tntglu7vV7Qn6J29txxHd3esBfJ0e7uf/ClyeQLSKZT96Uh1sOYfl/wUlBFo903GUrge4a8fatI7w7Y/BdMGtVZ40Wf6RqN4rxs+vX5f/cPZgfR61hpLstD28Qhp3yzIeo5f98Z4ViXsDRGbXhtDSb13jh48n21tbghxhAaV4u8QJvfd78VhHwTCp9q9uZt72Mu1S9Y3TSp8TR/Zbt+Wh0Ev/uNptyHYvI8MLkwBp9XVl/Tbq7/7KhaN/FekbHpuCzEyxH88b33gm+3v6ZPkFNfWhxen+UBJz8End29wVyB8Ir7Mo2Z1dyipSDmL+e6v79e/UI23hdC5Bd/bOXT3b6haQ71S2gWmU9cF2i5drL+09PSdf/wQle7+/Jpvv9EeEn+CIlCh2IrB9mKX/4Tsu937E+/uwle+91ZZfP95DGNIXS/e1k4KBL7c/+hHymOzOyVB7Ic1L9DaxVlRCx4Sv1LvMl6iL3uehTHrTPBEU83sZEZye2/zGghM+MpmBnbJ+5O6lrvs/8EV3+p6/ywh2FDN3t29Sxz/sfi8n73rssEIkrenXcFHSe929QRzEdeEPkkHN77ERUf77vye3/lgq7uzvPD7bfoEcTPnB18zXuCLd9a9QRXd9dDe/oVu9j3+Td4R6WDvBMZU1TpvZXQJDve55YTvh1przhnvusa+q+zE4BL6odumL+gGON4bkkr6s4D5Zd369Qhd933vrzxV93u+yom9wj4XrOlPJ4m0WXhsooyiaL0jS/BN
e+7vP7hIe6PNqOjrVwhdFyytcvvbUFeWlE5aXfFaf1BhGVuhMyR7DyZSrw16z3+fn3+CUmfN9+ny+5RCtCrndynJNrKiMb3L93SrckRuvofd4r364z3j3d6ZEE8jef9enCPhrDt53AJaU+8Ep7/8v5x+2NM1k71WXKToPwavRYcfy7tkbjCmoV9cOGh980Eumfxphx1u9fiSpofLnviuIe3XgjGihOqqveJG2II77v9RO9+aar7cE/L/LyiVWolONM73Hs0aLsgq8gP0ugm4Hebq4r4hqCtEI2nnrUNJXOLRnvpMaQqCha2G5vniMJGW7H1SytVsAO9u5U7aHEvbvd3Lj2FX4YL4wUU4/T3T5Nke9blPl+8lUYyU/UI7u/LyG7uF1rgqm3xnot4voBvd93rvRQBr1+lRWcmGy+aT8FkAAAFUUGawBXwW+LHEuONjymaLy/+4ulNLG1frNxdayu3vcMeYmCT+DB4KK9xngRNemS9RgGCDhn/gwf8VW6d91aXpS/27YeO0e2kcacmCy9buXKX5TJLXrdoYbjqYfco+ZcH/fqLAJuOqvw1V54LPHC2SdeZb289l8vJfo/dZ7K9/w3eYbTVaa/8up8wp5h0opGRSV7YU22zhvuoQb14l002Tmoe3bh+draX/HsEu+4aaanaKDe+CKQ+G8x9tJO/Wr6W1ZblVen15fRU73hXwWGxh/m0iCsbgXhAaDvs1u4K5bf37l77Q8fs+hBcz5mmJ8nrRfbZMu97aQSu78M8zl+XbrXlhM70r3deCQRPsPShMf8pR8U70/1TuLvhhdBvF+EuT208J7YwUW7u7gq2lu706XktynqrPGlsiwQPPz6044Zf2zYFbcN4JxHrNFduCtiLDiuYchP35f6LBDINB5ju+2K6Le5wtoXFiXvd/Ruk88ImQyiZFVyRvsUCz9jVXjciY2JFOphwegk/cWI5sfTe9R3BP4dSVqdY55f7rcbwm47R7fNjeIT6+N4Ru0WIj2OdfvUjxntOPd6aCtxLdmm6cY6rUF9XvlnKHu1n+l8kI+Cnl7eftkf7vcKEeXu5cG7LgIX2X3W/mH+Wl8b2w9qjR7DUnIbi++IPmOAXWP8Qb1iP9v53v9IsN9U/Em4/OhtsWrBFe+X4e2ofz2OtH25VTlch/YeXv6cEN736ynDnVoqLv7wkQj+xSC47OtSW1WFrNJPlCJJOKHRoWEOX0hq05/zwTld7u/WX8qrBRpXu7sfRSckQj4K+bk0epf4ri3+HiTeb2f48/X357jlz/6BIWOMu9dgnhy/48M+7vBk9d+76sFOcvMHTDZmbdFbv3WJK8VxPpmQcVBVveQmzrlUFh1v1DwjLpe4eBBTlO1X995Uz5/osSfCT118/3SFf0vbpNoEWX+57YLr3d76y/E/sXnyEPGBA3ire972mN2/bCBxL91e7it6yzH5a6OgREkDT23WI7nq+/IvJyf1+ecijOn/012diD7ve/rorJu+unH3dywvfc/CK7x4imV+f/L2+oIrhfj1rxKYlRT+rH3Z73M27J5PSqWjV/E0iu+Xqjx12nnnu+90/pLsVnx+lL5PWr/L2QadKlLSBLufKt3SrWEbpyxe7z97k/r/CVaPvcI+L1q7emsThS8Q4Z/Lglyk7tu3Peo3csN7977vI7k6/+hpyHy4e6j+Y2erdylvlm0CVC0B72m0bbfY2Ebj74v5Ilk25nw9Bofk/f7wkR3vkX/csfyfX+o8k/7or5pewV93LR+NyJ4JfDsXlTL/9BQr2789DP97zt+Eru7pb8kIXe7vd36tWRy9zPxGEVt4UEW75P5mL0/OsE1585e7e+T9Xrso825sr9yItj+7Ct/XrNpXqss0ZsdnlJ9K515CTf1Uib3aF1SAJq+3E58JeG6UsFtl95NS97y/IMFp8JePIZaUipZfrJmT66CIIVxE9+87W+tEKVlKf3/FCd3d+t/fL9fbsj3y/E+piN0vUIkwEfvQOzu0bKJ2Dt
mUv1wtv4k8rS2PY3+EHprl/7MZGTwn5Cmesu1mEYqq7VLL9/oRF5GMHu45277DqT7ueuixhn0SHvNtEf+9Ulku/0iFve/ceW7933e6V9JVhMQ60LPveX6+i3fpWQGllO28LL8hnQtGq6Swlu/P9qXglLPlHn8qeSJO5VI/c/LnJCOO+7N37vdF0CchaFplmf3Km0lIgR8/pQDHqkS8RGevccjYevcVNRcRJI/qS49CP1fGR0T5h/4yAAAAFBUGa70pAK+CvzDtmO+vyxZduNSCF9+UlVhjzEkPGOLg5y/+2EeMTa6anGnGmHzdxuH+3v3BWeMkjoWZ7J30U+4IDSwM5WWGGUGstz4cRcJVCGeR/2mJnjvmvJIw+kKcGXVNeXRP0msTwmVrdmvXlhHtJLd3L3/lpPcKeFxnCbrdo5WCv55heHv3/G5xZXlQoSN0wSvClkrTeQpR1xfdrXvpSAmQXz/L+7uCK4euRPttk9Uju9ydyltYTLcYIvh5df2nbgjw7J/0qS36WuTd9fk3eFC/3tgsEX7nefZmIXVSsllt2X39sTvQIm1W4OvHCuwd7YJSlzIPhteZ8dsnruSqlljL/SLecVHdyf3+dmjIqvp+/ZUEcIvpfgIl11Svhqetu4V2xogVu7u4O+27uCb/4exYK93WYVnz+0axkKj575Pt6LyxxShEoOIecV444tU8fEz6BJve4l/l8NzIMRY7jDXdf8nob/FTr5RKRZ/4zi6xXELcOec0RywtciybXopOvYPBhQk97r8KXDjSjhuy8LnXTiX8a28hdrAdk/3fiMy1x2vvL/5cJerN+oexZRP/cvcQ+hF6DVrlgWf7w7lnpMs6ThsMbhpsJcO9WyKxm8uaZ3vye7XueEIcXue4mWv7deo8pRAletrqDub3q7LBbtNPw/F04vbGzdBdO/CD57bk6Jpx3b15BRwfralqhobb/8Ee6/e3uCQoeRaT8XuCIj7TtdY/eCrE/MXnu0C/syt2Ib/oagTWsaUr2/9y9Lgf2fm3KtFi91g19CyXfFdwk+3BEQrLxNDVgktVHm7v6BF5V8uhr9Vr3/BEV84s/4IjbNXy3eWzx8Hr91b2RWN/IvQh5svp17hAhF7e3c/ve/cE4l7kt88O2kugVb3u976/EFfezvfLjLu7zB8pzu5cbbv/6CNp3vjZxe/5f01LCWck8Hoz7XyhK9K99fIEzSJhxLyOrPvoagS3vd+X0Ei877v2eXdD+Xf5rR/X4mx3vl4Q8EwhtilZLv5a3y6yfqLEvfc+dqLLI728v+k0Eu7u/8k7chK0xZTk/BIVyxfOLs/7CRNzzuzf4re7u79P6NPL1rY/bu7u+7hHxMWP839iSbaoaoay/8uGyunvWrus376y8Ydoos1eMxFI5xtEGYdK3t5PXH73BHLB7lBk99VXBXIbd85t8Vvot7cRMg2ywZad4q4JDIh4/dUqVVuof3kR5CSLXWuzNrgl3tiTr/oWV5cR7v7fL2/RN33+EZm3fd8nhHzEc3l1F/k4XE3PJ7e+8hcSJu8elfvvJKav2eHKRhvb5Np7bbJCXPBu9Llgo1enqm2/k80fUParS2Pim77sPz6f9Nnq6vr/xZpzwdp9hH7R7/Ce73u8v+R8I+QsMIlf+HjQa1WTyenf5frynfHcusr9Jl13XeUTdyv+bGe+iXu/5N4Z3+T+93aGG3nQI9NwUFED+ASiNqf10DV08tB1D4PvlL9fQKSiP2dP/++7IMJUnwgtJ5GEiDVG9WcctM0QqT9d+kRzJ99P0Ua+lzQRGe9MNWrid3wj0sl7f3CAuWq46tNm9yZNKL0EhWErB6nvf4Lb3uzemXUfvd9zKrnz4ISggvXlvF86QrkIiCu80Rm3c/b6/NdEXHX1LafevJMJ55V5UH7vlpnP/umGdP/VEMmX3iDTR2ReXBnTm3F6WF/VOviPNPI1pauK/yRHRuMd8FkAAAAUwQZsAFfBX5h2Y0/xZaQJf8+9PuLJuk
t1rJxfjvj0MTr9f5uNETC/mJhE8A5f/cEF7RA/D9NARvfa+wLV2t/WVVGu+vsYfPqQTeSr3IIT8NS4jD1tpeXh82uncfEL/gR3dHX/h9yHgjslrLx//yftrneCO0ZQ8EvC57s8hZWv/L4/2xMKbVKK720h/EZq7GqZff0hN9znL8v/WWJYF24VfuUIcPS5Jf7boEBUGAQ7qHH8gbe9w2tRayDrXP9NnuP/I9ikLIVybui9qv7+++qEnvd3fL8u2kXmkWYT8FQrhpJrMle86KQJ9euH32e2On9o05DB+lKn3QJIaLST9ac7xZTJxxy93r3BPJHJF7flv2wR8o0sVU+mzywSeOPPDJ/RPlQJ9le5cSwa6tUlhO8iEpN7/gm3HQSPefwze+97hMv5ZeCwQ7gIl1vnSH15h3fx+G3Rv1uCT0uNDibbVfJ6r+ogsBTi5iZ1/7EsFVx4L7B7wuSfe9xS2IQbxta4K0X8tprN6PXunGSI8q7pWniIOjttSKb9zqZPSr08FPLzQ2T7tJ8WG7usRhE9l06VURZu5/9ZhL3/LurYSfLihS+naFwzJ1J6Vf42Hui8hTLwAp69c/e7DkJq/9w08pZhv9/v8Ff5KVCGgXnrtnOUi6bs/4k9f9QsV72n75ks8/+C0j31PXw9QUFMvMBAQaIxj+a9+nJSv2eJw31v4VvCT1Wt8KX3fGecDru74XIbuBxsWgTEAs1/Sd77fXc26UoIzvfLaTqS9/cUS93vCPRSGYmbv6BPdy+X/ujsFBZB9nzF7tVHkIT7dr2Cc7mKXuUX361rzMTJLad2vra3giMPCa1r+/ZbL31kwkX4mvDEQ6Rk/c7P/fWJ/7GFcu6In5fd7+nvq/pFODL/vvsQev+fF6T6V/u73v6GXHSydodbR34/L3rROk/VZ2UiVe799HRu7+ujxN793XlOOIPj/NjtP7Dwgj0loNLPfk1+3Feri/+Cm8tmfPsuNvZzD62z2JuFHT+CCbYhy9ySd7P1e2ZX9auGOeu5G3Bpn9X6Ox93P7akm7v3u2Mvlc135u/f2MK6O7uPwgxfAx1D1LcvhXfWC0l3wk5JIoyi80FVz99x/rfSuQDmT0lW3NEbPn3vpx197pctMn0vJkgpl/er+M1DtveiQNf1PtBHoQRz6/P7/BcTmYut8v+ZbK8/6KzZ/bvI1CBxsuF5Ll05K+letLH3sJ733eX8naHdjNs/3vvacm9+oJSPV58umH4KCMHI0oaw8girc0lVDjyNP6V54WMuNpzTZAbfVExW/qCvu7vvSloSe3glEO+T7ireUogr3u7v6Hi2stbllvnvY+yan9dVTc9wVnza7q+ZA9otO6jFPxRHyoPl/sSgnZd7vZSfa4tE0JIk6KcVmND7+ZP6SfSZNXknLu79QkcJ3O3d9wk/scIlaUlM+FW3JPvWcJ3vn5N6UvCZQ/ibAZ8n9vJ9du5IovPA6V6D7i/L+XrwR9U0UrTgkJjOM9qyMqBAQ2ZFwA9/lWb71MKXN3D9zvlbQN2GOXan/UFZ0srroZ56ii6Xdu5mp4BoCvUExE3vl8goVyQRGXU6b19raYs7T/P+uT3ovokp3ebe4TKkN8O+735IKBGXIxd1dyyi1qaFM6ZpFtqC9y396OVt8lBPLUPQ+Zhu+/1IkLeKJG+7t/Uvd0sjdCSJtVc1Ml+qCmzfbl8W6WJN5k3vghvumQY9ESJk/oR/8REFOXLkbbYTNK+11EaT7vr4e+LgAAATUQZsgFfBZ4oVzYW9/cWQu897hsv/uESXvIWBO9X9JmE2j1G0v3ElCdbK7tD+91ZYwj5INEkaLfR8Efjsd/7lkIcuncuGy6TFmJ/8IFySaPv5cy+5dF5f3WtfhObHa8VhXwSDI2JBkLWJ9cy/3bjcwjOfCaY8Mvem9GteoRqtr1lyS+33hQ9RPT/9x/VE4SLl7UyF9/cFMcnXpMkp2O09uiseVgpAT6peN78aP/MFAzd3eqNlf3l5r302dl975I
t7zc0oVL++WCwQK29+pFe57d/dl+vcFPfMtDyvdqHcO9a3sExR8Zrzwe4S+ehhal4TuVX7s6S3Bb5c5xUPeik8VlC8hzhL6yqyp8o2US2LQne94fXxrFDoS2woId3P30g1dJ93PgW/h7Q+zbIc2yLhmHh/KVKdfPVgsOOtbstA3L88aFx+5koX7gllG2+ErzL/2X1KVcEcOLha4SVEdqy+xsVDK361/pHJ+G/eqzQVaIdPzhoUcoAo3qKcbLdNLpeuG/BYTOuVTx41dmbSSg0/vcmWjySL8uNEywkvLCFxA+Vh7e+V2sfn8iXGHTAS7dR+rCIKDEluT1+9wTfjeLCtjMNNTFvbxgy/d04op74etz/uFSeTAT/9v/fqa/4KClKNbnKvbaoksEd3IR4tPuSNn+xdHeTgnpZAecltB26nLo8cTD7JvL7xwHigW/1Xv9ny2/dYPRB+X9u978ihBd4KTYq7z+b/LS5Y6K3e+90z/Xgl3u73kT296Tora/BVfc7rryv+/fd+j19+HjczyKjTw6I2G+7fWGVJf9i1ppTQSHvexk+9z/1fICKdm+L7hB+QQCYkydDbTbLGt8uU9NfwTXfe3dvpzCXv0eyHLNfUfKHZRd3vblx73wVz5fPDnqM6WLU66J6Wf+Yj37ctu/WLka33fe0RbalSHlTe5lO75eEV3jzFYpIMkXXL9N72dQUWWu7t9GEvv2wld73krf4+jcuV3e79e5II/qWd6PqF9bpMXHprq0m/l9d8ERTuvTfiyOG5J7gdkqf0S6X0/SyelTSWo68607+0nv0mNJ/CHjBxXyt3n3jeV973JRm39CT8vjOD+b9oEMv8jKEvuT4SLq90dryCBGr+fapywvaeJT8qzbB5z/FulPUWZz4Ag79X373TC7xBI2r+ILHwemewlf2hcvu7u+9LNcmv37wj4e3wyIus6Geq/PCCb6XPmEXT3lKoQK8svFaSV8vtJ2VlGpy06PEkeS32ntxvER9fn8517SPd2Nnes4lMxblKR0luS++T9afxxO4sotvnuWnoT3cYz/XaYRjRexsZyDiEASq5NPUb/KfxghfMKcA/l+/zE3e/sSfcQ+7wl4eEJrbJ+5mNeYLnrw45/ev8n05Y33v71m/tUlNXWqjb3cVuCR9bOUj481I+05RdZHsTv6/2aRX0y3Kpk/Xo6UYdDrDzlHTdt3Buet3x91jXxwfNb/3fdLkI8wReT9RD62PC2V8KE/bv9BStbTiRp/93r0Qpn30fpaEQQnvIfTjL9e4TK51ty+91lihA2g0qTm44zTS7JFbvDraf2rUkLL/X4sl6XLnzlq5tb/5YLjt3fG5NFfkmpPyf2T5UiFFquq+FqyZb/um/yX3al8nJJd6XkqdPJJVd/D3xUAAAFDkGbT0pAK+CvzDsvMR+LLj4UOUGEIXtLy9flI71DHmJgi+KsHxmX/3CHhRW/Qn+X1iNvpAxnFX4oo+0IVJ8u87+4eJs593jpzfDSVQQ/D/Ly8f/9Xli7gG+4dPPb6Grfe31BgXLu7tdzal/UI3vvfd9lu74XXuNHSwlDfn0En1dvah8tob7vzm//xjTrdwW3CF71eY6YGb/YKytKY0lzDDoW333R33rxM+48J587/pf6/CRXS83hR/gnFCXCsXd3+Zfu3wS/KPh5dM+1Bl2eOOHBf/tgbXxEiO+4eIgR38vQJqaov/ep2VfDF8seWHcI9z+6cXFXfsQ3NXXtlKfH9VjSGJy0q8sEnaL0d7LCPdzL9yt/J7+SWondtz/ddtEl+4TfqCAU7jytOcdoaL3CTVW3et37ncMX5/d+CqYscG+NXQzxaB3OFW0+ry0+SOPcFYH8Fk4d26ua7dHuT3v22xGVKUIvfKK17hHu/DjWXt1UvjIqUK81zNtfClyB5ClkRvAyVFysHQXelV1FFqE3tB2Q/rIYldki/prFoaTrZvWCX6Tf6i0xuddBF/FcPqcf6vokjb+sJ5WOvu5hzzu7u/ZYX
lXImi5sR7tOJE5u47e9WOffq6cE27rvsFQ9f9uy7+zfnXr8Wfcy9zC3SQt0ETZbzfco+yQUFIVa+ftk9/y19yRsS/M74c40U+LL7/bkZoKyQ2vMtDlDaA7aZHCz99ffZaLB7m8vCPokW78nGVaXpMt2UMdT0/wW73PF8gi/s5R+96ds4+8fxEE0CV+J1/8bwxuHqFY22/IHoZi3VMt/7P7wTzlb3cfEv7b1gnIhjNvGzQNpZ/vY6FoefdJ95e7vfSitt3vfuW98nrnS5KshHwViMrJOf59n/78FR324X4734/1faQuU5Rb3/IR7vJ+7qJbghI8NXdh9cO8Fx6CP99zp393tvoIkuXLe7uj6XNCV7u+/wnfcv3l+vSNbvCHgiLNs0kv2FDamXYiKxfljdz/0V8uyiQp3bd3c8fVN3dzd5WJOPTv33+CaU2/oj9Iqy3F23u71r1EZKcPSf/6jsd1eeCEpFp/Tq0xNwQXjw3Z6Y/GppP//dMbsfbSiUEiO/kf3hfd77q63+qyxcp3mdu6eljMr9Wg6733J8I9AmvdS+PTqsh8FAh2/V33RY2Cg735Xzp1YJ7V3vciZf3yMFRXu+7Ge/Sh8XfSvL95qV34iCHLecX4LKLKkfd1c980Wnx5tyILvzKg3FKr9RJ3CLgOwdnPvn9LI05baU/YyaNK7c/S32T+hkvf735YXc/CPiiY2ve38Fhlri9Zf//GHcqJ+Onbhmuvpe/oYLd93vSLSlZu9aYhIhxZP7pSTxGaek6vdY+W3d++rLL9RGhvX+/Tl/cmzaT76w/u8rDh5IWAbdCtlnYbiXI/KumH6cuU5VQyz3On2xh9dQwPIZKbeP+nL683CXijZPHLnPkhS65S+tTR0Wsab9T5weVUT1obXfoXFiX3p0tZeK/JLe+/Py+/qERBA9gzh8e7jF9GrTg22cvpX4w7wbHblvDNwnz/PvmeL2qUr1ZfCBM/93d3hXymd9VZTp769YmSip18IFfZS/P976xRk9sPOi6n2VD933p93k+vOiUgRbu5mQbVKoW8hHCa2d6VcVae89eT1/JK95fpx9xAl9zNv8rBPSMZ+KzC06ZI6t5cuKz74Y9ESImoJSkt8I+Fh8eRILIAAAAWHQZtgFfBYX9/CIrNk3mVSUxuC7iyJcxKeW9S8kyGgv5iVDN/J4v8EGYFH+RoJdr70Y2N2c7T/NoHmvLVXj5f/y/vbiStDJcgkeuL25e+bd124UIR2Q1sPe4Jeh3m+wl0+O5Jxq3LF2Q2OC3t3v3Hl47t37pI37vNfL7STeS79XuE6JSyacvcK+YZNsJuXjb+wpmQCYiisq1oSzV3aNk0IvOkv92sJ+r9vftMi1sv274zxd3gjuu8cmex8S+LZ/y/T7gm7I+9k3m0k7gkLJelUnpJftmksrr9y6U4vosssPW/tMssFU/973ZOlQVf2FBFSHI3Tt/d/KiUVh+RPs2vIf91vaaBNv0widOFtWEMvq/b1rrBWVrI4jXocfp8xhS3bpjRc+8jovKSBZsohP6/KgUcoXK7BW/tjatu+H4bz9oFG8qi6bua9/I4R8xHta3LG7Pj2e9/M7hQM2/rKFYrFdOWsL+aZJzqQsG0UZSk1U63cb0Kk4ZWEHy50MvKIu3nfv1I/1hRAYP29l/8A4nEWXPQ3yGo+WnwTYZRIWjo+0/YJ2En10XlglKMvn5cIPzoMNq+CvoxpttHbKffHY/BCd28ovJZLv0eKI23Z7v3ClQJaSLn3Bq+g3SF0FaG+5T2hHOWmufqGr5vUFJHObRL9GLKVYyDh2CpI+l20zzxHdooa2w7Viz/hJ+RAvNbI3e7buTv9/j8NKZ5HxXnsDIbk+Kw6oad4IY6P9TorBATVaZ+aLD1mWF1hrcajneqpMX79U7hDqnq8FP1X3TZFh54SfvsW0dvf2wUTAU5IfbFKCW4PTDpwSSP+dl9MnUFkBNr13f2gKY7GYRZAfRPo7sbFQjaa3/HGfRAlaqqbS
oFZrTZWpSam48WQ1iPd9sr69bS5xIndzkHLfeiQbdLLfcI+CgjaZv1k3LdJYLN/OPVe+f38kFZTr6O3vbTu99brwRWN9yi/+58O60/qqPEbykG++tYvwT+PdzP1u1wSmkOZdhb2tLncW+MaExx3WXqy3f0hRL2N3dwh5CcdT69wV5GdN3d3d9eWCsr3c/3n8V22meNQJLN/EL/GC33d+ld7v3yfSV3/V/giIe+lV/ghNw2k/y6PLMGjp/p3v9V4q+9316src/hDUFRDLzJy/Zol9rZ6Yy+3d03d7eXvXZUEDz2/ZPe/QmC65fRYzposVJPi7WY1yXaSTiPPkoavJ61q+E97u/VLQKj2c+8vuqaRS/9YKCTx8+OlCnHrF33bd3+EZf3yBPcaWnFrCW99tHk9uzXvE03e99KWIKM0O4zka/Mf9YT/vuEX6hkmOoNTQdl+X+WfBCRu5pN6Q4uf42r732qkQKS1j69vHrdNIi8pVKeojU33uNfunFu+2h8tMkYRtMd5K90TghxUqUKXJ/DBsJfnIrQBLu88zXe45HPN7R/8In2lZwazmq6LWgMST9efOmW/Ob+pwb+4R8GBIdXMcrxpU/ItUNO7z9daNb7CB3nxluM+HdWbfrHC2r3e930rpIgtb6ETR77KlO8l5NdP/J+l1vpoXzaTvJ+l7ShGVk4juyKAIo4rAP4WAXdq0F+ogly0z/6mhHwRYYHR06+Npohay4ldDukJqnpDu+qQaWplwNcg44JfnDpp/KyP3psSxUFx8UfLGn26I7Fo6rW99idbszv23ifmihE2FD4Sv2v9ICTetXPds98IFa/1yeqX+HzkZ9b9Z5HlbcRD68O98TXFpjS/1qSFSPvuM8/3n8J+S3Ty26f7+8El9yXSoqL9ZFbOXf1IIL3mP/Jb8dlddEgulnzx5WhbyarrvFEl9q6+kajyXT+c6ubP9anTa7mK8ua9IEZJTjuqVNaWLvu74Yr/EVZ8HfP4i2uk59mT6/9yTf8j3tEcFcAAAAS5QZuAFfBZ4sUlL3xv1/KQbmHWxuvLnONqffLlydCF/DhON+14UZ3/hjpY2RDN5qNZGvfFMH6bZGZf7bcIlHCPu4WrS4cM3Hdv3GkctHcP3uQcNHHIvUH/YcdCKT5vR1eeEr7ngU+2DALy2VIwycJehPXuLu+9/xdkpq9S7y/+pbz5CnmGXh5Lyy/u+C+Hxf4e2s4JGZYaQ9vNjtvTDbYfy+3W462nwk45qf+7zkfhP9kUuQ2/T24Li5apNTD9O6e8fvblpsCMHj07vPH+GZQaW9E/e961XQu+7u/lhHNve7099lEwoX/2wTiHoyH3CrvKPf2X7vcIXu1yAwoFrSYNdqVuC0s9N30jS9xfjtjbnRZfLj8mSHJ9L0XieBnhH47nnk9O/ykhR/mEBoRciO/aFTBdyoHFZQhhhKuk9JN/PBMWYGwQ+W917XP4usERQT9rPxYGf8tLaQW9snh6VqtH2PDvd+svo8mRehfBaQgVnZnO359pNpBSBN6y/Xwguaj4fD9LYOzoaRBbS1JBG/8vprJwk/sVBRxZh7N73DmP9Ye55CEfkLxnlhRjHdECfvOw7XO/f9fcsK2rmQ3gBxgi1A/ZENtagSwJfdJ7o8Up669jPYG4OGY48vb04KymTf3aucc22m0LYJDDKXT+XqHTvKKPu/KlIF9axzH6csbVxFyv7u7+xaBZH/Pkx2Qq/wm6FvovMwVkwQ9nXJp0lCG2KCHmAqW1zsMTl5RlWWuHyAkLc7twEn9b90Tx1q45l1YIrpcOq+ta9zmWNe/0kdFoS/6p9ciRiu6b3y6I8IebL52L28FZHd5bct3v7f4TO7u736wVXvfe78ChS+T6sXd9XY+ESHv3d3ezk9VfV61qkymDSS7p/l077GkJtv0VZPV97wnP++9fUIL8RFGeARJ+ndy/L5PthK97kf3vglO85Jy9+Rt6tE4++PrBHavSr4ve771vqnUvur4SKWHnl
yeln7qKlMpvBIvPDX5PrEa2gSEsaun2vgk6ltFtE+tvrBYV4IW/Vxuv8YzVsoq1hDxpi/qRn5/SWQXysSCL/EEe65H799+okS+/NesZjrt7cP3SPmlveqfCN8d/ex9yi9V0Ft8gKya3hN3/6kvfWqRefMn10t2EDbJ8w+FO7pZJo7VpfCRXv181yeuq9m3ve+OK73lhu7/QQnth9kaOXP0oR6BOOu3XfvKxwlzLvd13duT2kxdUvTZ20yyoX7S85XPlxeq9kd/mlPLT+W79btBLsld5j+X/koJ6YfRvcv7aEosIGy/bJohwhUcDBvtJVH7vmmft93LlchHCPqij8eSsurZ5vJnzhPcvf3fk/qYqY09lI3+xI0Y7lZG+swiWafSRd9eT9y4Q7y+0z6Ie59vJ+mKEUEUAkd4DQ8Hkk9YPcBJv5X71GHaDKoG75/h003H82+mvGsoZN9+vXCuxInqszxXT2VBe9w++1Wu6n/73d36sLniOq1IV99iYJzXhNylzK2qdWr0XcyP00Cv2jsJXeXHtPtNdynCbhbFURMaURxdM7ZSc8eqJ/cgl6e3KxV73pWiwnKit3worzQzu/CmbE/9cL+iJERa4gslYzCNXo8t1ciIiDxMfD/xcIRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vAAABVBBm6AV8FflHZg+nL/vhAvNg2QvDjRKR3+YmEX9v8u7hmUxfqaGFlcI99YX8EhNIJPAqly/+4Q87zGan2ajZB8pUzejQG9xpQRrZR3fTe7vkHz59o8JF5pgMluTcv73hEmdfIPXQXQvB7FJPptzzutX9LLa1/+U5iJZlO0gp4odzNhjZTzWDXX2NywLR0NG8CRnPqfwp2WyjtlLofjjzDhEzn/lg
v2Q60cwvOJYfvjX0Y/z93nYIZl5o1zLrvatU5f4krvdJ31p/tkK7v+Xl5kQTL/7gsGMnvaLkWMi13cb9TPuJvUv1xNIgrVTil8v3dbKc3fsSwRQEzKz/b//xVvVn9Hao7k/SuvBTD3ty/P4Bduuv3vcL57bnI4S2350D3tboHvhLxWtOTb37gqM3Rl63ljfxWaLxAYtZO2hrjN0mdidOuFI8+9Ducf1jSzAuHb/LuRtG6fEfteaQm5Sn+5JO4D5zOH+1fGl4R685UGMvXu+7fSNN/5Przt7CFvuUavIHMJcA19UrYk90Tlhv+Qm77L6woZtsZGfDDihPZZYetJc01eJteuc3Czx+zv250RjabH+R3Z9y5w2OCQB2Xb6PGZbM+C9gEtHwGyhfEQu2RobbOf9gp6jA0QIr63HICcZKH4PSpT+1k1zvJy/5ehKeEfQpmX/ywS92jqeVRWT6Vr8PWdwyzK4bSch/GPA+i4WTeevfWaF/rInCkjyTzNrFafVbGtFTwq9G+G5/6u+3pjB0rPT1VO36v+CCuDHaDrnDNhuzWh591j6JrPZKU/lKEvC2bz5SXwxu5Vyr91xlf6azwRTzvSq1KjMFuE5x7IRL3cpV0pbaluFDelv993u8q8pWB1zp6p3lIvbWThHwTbZ/m8rOLdVhe+7u9n1desEk2/x2LYLZdvKXfi7ro7BEU+RbdYJdO+PFz8usEnc6eXWCK7v7rIIhyj+mzqwQid2d+1V/ynveshMEZnd22hHwRkP76/GCR6v5+raG5oP3fysJdJ3st7vrSplu+T0uqXMZ39ZCny/Xk9qxP9EKDS5pcNQa36ylJUpTVFQRu93d7nTfk+umugV93z9/OmXgOoR7FECnP6Zm/cZe6T6dmq/LBO73rorCgnPnH2VTuXLPYcJ0w0rkQe3vD7Ubejpl+bSVpex1lBd6DMSf3JLThG+yz+zv17yc0dEi4lh1wrfLvlGbXcXJKz4J/rHLumsZfdOZDu9/0NU4q9x24eW8I9CZl47SMz5zxudvZ1MITLzMXdROETu72n72OT1utRbDO31n2vrw8WE/hM8T38+XpHQDv3dZffVN5do5PSr/dFJ/UOX3Mc/pjCy/v4sQc6YlZJtfRAk89F0p4493eQ+PR++lhPdrWOve86Fqm73W+T3cv6BNnSXeaM9CPh4lr49WrJGtGidjw2uBfVHf4JzN6Vzb3N7VBA73lxJ3e78tC0U0/bHYrfeVJPy5+gkSbbvtdwRFyZRWtH5ff8JaBvB5N/J/VSNngl6WS3W1AaXiPaTUsImd98I0h+1h8hw137LuQlrJI2W5JYSL6NXjxCnXOEuEf41Tr1+O3vY7xX9MIFOwG5P30UFo+fp5/7iBuq6ZLvdfcwjPevJ9S92+qpO8eIlwrG42cmPg/jFvwykG1N2eg70JKEO/v7GgouYPJeVYdcVTpKu3KSgnLD3fvY8Pus+FMkgWbJ68kWNTbdIRNfezr9ogouxcQXNsq/0L95CF794jeuyQU2z+YHPuqTyYWyQiRoZ7yfjPf4S3fFc0ul3sTmvJ7T0I47tczBFfc7Zf+pIYr6+sJE3ZKn6/ESF5cBZAAAE8UGbz0pAK+CzwiKw9m/Ihu7Jm+/ykW5iVe5eTXl/3y733wv5iTzMg/jeYeDj2Jt/T9h7E2/s8Mr5dy+a60xYPNSr+HIlrl/bdRhW8I7x59MzTr4muN3eWMIKPQ+vw2koVTyjppdgpSkdL+/hi8w49sITxGYu5P9x9z/7mK+neqW8r3au/xnhI/NbK3u9uHRysK+YZeOm2X93xnMcKTd63JlX9AJcRvtPy/v2FdXThxbdgacw92Sf/uItv0udxEey1139/vl/vfVl1Cnm8ZvS/vthQllO91frZOTu+W2qHvvsZ7uwyiX3vd0WQ8VFaYllhAp57bnJc/9v5J78n6XjdhK6Xy00JZsj/SlK+X14mKkeu+UF3u0Mru5IP3LduO4CLzqaox962VnIC3OM3
P5P7dpy/y3NrWE3+NEKWVlb7e4I82VIptWG7PXh23/3TTQKpZNHHpPEUTk0UlVNF12FG8yBbNfyLHl9+CI8OL0U//6/9+vpr8KbHBXFpI5HI9b6q/M3Bls+/g7t6gtMYIq/ln34VLKgOGXKg0U3qYzVR/v9fIYS94S9CHb+wT1q71F7lH+3eWHso5CN5mYZEjtSJPkBH9m7rwX1U0L/7BXEB7MVbp3zs1xdga/2dXzof7QXsAl2l0ML6/9iYJOWd8v37gnlWJxtECNur+kulaAvx/w+Wl3TO9vu9+p1f9r15fV9QjMJPLjYZL3IHb5FpPdM+hqxhoaUzfIoua2jz13B+4n0JjxOG5Pd3vdOl6mJd3CHmys0vRiH/v5PcFcDykzilTJ2uv3fLoX9uvWLXuixW6osF0DV9Xfe9M9qvbvpldk+T7/8ERN3eEPBFl/WX7XiAVzpyaZS7OM2WMvv3uFDisVu8ao1G90i52fpQ22HdWuGaD8fSv81U6zJeMIiH4KRY8besJ223AjuQXi+NVs8EJrT0veJNDqL1aPd0lFvCp89+XqX3/8NGdy7rCS7G/3+Pu/d3vfp6vPLffuS9/sFhXKw9ux93SIL8s/3H+TGcbr6BSIb4HrSLCs6W3oxC/wTd3cbG/v3lOePS1T9SVOe+r5zr90VOrycxBoSXfZ+X9p819+myyB0/9lITu9qJJUI+EiZGU4+md/GEddOT3N+a79PftCxKNjfLr+zsIz/58ufC05P3t+nvOtXeE7re7f4SveQvfvye3+bizbgjV7+LRj/szZP0ksalElczvAJ3p5v7WxK+kyb33Y8rKPf8r3d5YfH2j+dYN5z+P0wj4ITS+6cPQITO/48rEnLh+5bCrcstvzJi20uT21Wqly/SklYyf99y0977PKTmy1F9nyVyfSYbl9BG5E5yl0mruz6giJ5aIx3+J97Bve1J/brbjCW77ub8At/Y7+/y+T221a0h0sPDiTF8vdLD6euSoS8IEn3G8m7w7TNVTvspwnve9e2OK9KynpP3Lb0opuxfF9+6FSl6+8puf21japAqnSFGkm5liuMCM1mn02L71XlLbh2UX2D1fA/PqDnu+RJcXX0b18a3+loRBLzb3udIVWi+T7+URJKUs3Q+7CF9+HHqPemqLFUqXNfiJbzz62V89OzIxt3k+qGVbreZ5LzhTWmS5ZU4WL9N/rZMJkKjt3u77xB577fvEdTpqt/JBFyynBtImtr1C/qkXyWmn/ET55LvL/+Tz/iIJYwy+fvnTdPvefA/8ZAAAAE3UGb70pAK+CvzDuHEfxZZLOcs5/v3MQN5b78vksL+CQlouMemy/+4R3dmQcaIL8T42vYQJe8iHZf9ywRF1zpp3J/TrnYQI6+t43AeRNi3KXqnw/nLkQdyBMyu4dv12RuOYv6/Elxs/vVHq8W3mu1V5kF62e93P2i/r3dnHFj+CPpjuhRQp5hk7VRRK79wpnSTxTjTG9uyNlvEOqOa85cuG8iPx0QzSpQZfu3oEu9cXcq33g7UnJS17jJA/3d5Qy0HOV0SGjv6acydNgvWpr77Lyff/0X4zaC+FC//YJyR3ZDizXn7M/j3dsGX3d8Tq6ZR5oaYd9aWCoqKhO+51s5KcChsrEZ63D9/v681yJ5LPBNYeaZZPsJvYu2ZPTacy1ef4UW2WMEH7u93Hdu75g3r/wpMP3vnfnPvc8dFZGPE+fvDpZh8+uN7Ydn+upIlkiH8ty6tfc2e3EFaPym2FYmHSVumqPHx9+fJF/DMGT1l14KicfEgcNaad359N7hTphBs/DezL1qtzdiDtfhH582l/90Fbt5B4ArdfrM9P18/XXRRN2zRCNYKDCH36m7N5uC0gPa/resn23bpuPyiE6/c0+Y2Eij9WZYK6j2xW5NgTVgTJoHfYUanYC8XWKO5X3yj7fdGzeos46GlNIbkF+9p2XSHn2Ji5c7MqDq3wS3f3MOTmn8FmNBZsKDSbe93KqY0kVOM
NoJz+8Z7zkndMduNSGQ0zn+WJEtPTy/J6151giJe7i+TJOvuEPN5f8STI9SslNT7/BHtv1+C7huS/1JmHf22CIs0qcG7zwQ3Iy7F37wR+HV0KMd6xd7vvbVWCc0OPyFkQ39tawQlWFNv7h2VEK5fcIeTeXy/7eCMjxW9b6xwm7u7vKxW+/dT37FhH3gv77wVEcOOG7y5xlki4v3MIui7cYUxpfMl988d73d+a7v27RmCWleHmeNHBurM2rzwV5ZXhvZtVMWfkF9BDu77N7vSQvYK+Q+25U7u+UCu3E0xNOuR7/oOXvb7t1CPYKjOWRIorSn+2m3dtdthKX8+F3fuYr712mEqKcfl39nvRvt6y3IcXrX7sxX30eakBdUU/9tZ/aQ/n5w6+z984sleIVhOkvu+T7dz8n6GXKoe7uRS7vuw3CPQdjJjb9OZNFmTf5YSzRLyz+/eqUdHlorv1Glt33cvyCofSWJ/4rk2191jWEJD/MNK0UG6u77dTfwXzlCPua2FxT62n/qCAQ7vmc0Qv2Kn4g3F0oOG8DP/YRve9ITlKhKfvTtYot3d75f3096WL7vh12MI+KJjc7qQRHn8v7+jXPwgd7u93RO7cnt2/kKDAWRz9p2jxSs/Z/3IRw/dv15JO590PYSzPvd/VlfZd/cX03kRT3d7gl1JklvzxT0WrqLNw3FGkUuI/////wj47CEjcvxkSF5fyYR8mCVtNf4UItrapfB8TeT6cgEmOyRnJzC1xOSrfX2YuX+T1fqJLl/P1T++hPyRRqwg40w4e1KOQs1vr4Wh6LT8VvO05f6J9+Ld9DDi/9/gj3cvIkKeTLuifWv+tzutevpImf+sIlq+733vXkgp2/dx4T+XpxvS3up9hb2abtddYmt5TOc0rfvCIt0ezfKOL7pEODWWS8Zxwxf+IyQ+XZfNJ/J67bioqJ7qfP4qpEgsgAABUNBmgAV8FnihWYEIdyxG6T9xJNfv1Nd/F85o0pcgl9I58sL+MJy5JOYtDSBNsx4MUzUbH/GljL6tFl/mPYXTyOcmz3qcVPbnr/3ClEURvb8Em0nbhCsak13qti9tsWI33joW34k136vzRzuU4z1/4qnvqz+Er5Z8E7adcvyX4nx4Z02z/qMUhTw4aEH+mYBrekvrfJQhyl3kuEA9Or3G4azaCb+qsb4EX8Xf4fXuWcPUQeSU0s23VFwEM0t6rQ8xfe4spi9faaC9kt4tdyzJl933CkfU2d03+CT6OfKVJVw+lw07CdtEK2CdkLRv9ypvtsKejMmZfaqr61zDlp69C9QKq79IwUP+wykEq7hQoRcbhSOxjLG2lebTPzR3uct8bI5abHsMKHbSRbjeEH9W8O+cmN0QSf6FcVuDJZi69a0zlQrTeffk9JdfG0GQTjcjnCRarYRUkXEjvvjRafnSagxr/y++9BS4QMOXW/vwtwywOjvDq6Lfrghgt8sEs7c8xfb0j2kyywVz+/3l2xwwvaNwV344oim+N9xkk817mjtr3LG8Gb7Rwb7CfgsCWn/XXT50BjvCbbquOlYt72J98ZQv3uHxsve4kuifPy/9ZPpOXs827aPWa8yBh6LqizeNepp6CHKoAgPToy834YrBv0Lb3+bhVTPe1wm/seIt3e+AJNy4dn+/xnswEe+bJwxsrgjgeNO25fr3ChQWdYw/ekUeKV0menniA7K+rfcXuQX4gvHB6wqimaOfcIbIi3znyvrJhfTWzn/STxtGOmmz6BSTZjx/Wmx6ho+YksLPtk+nI9cKUVh0F8dsb+5QKWrNuDaWo+51JgvKJJ1ze+T9aCBhzHlhL1dl/3bBPDSThGjUI9t24OjwvhL3y0Y2Uk0uv6y0PDlmvJ9qo0r4U03FndwoBfyz/fDV/15p/d5w5LfK7J31amuhyqMbPoZWD+Z+EyuMa3yhONiPsWwRGoOp23y+pZOCg6Nzg14Ve8OYOnBHHBJeke7yaTbye293ikKwDH6Lp2kknhmXLUnC5jlI
ZRLOki5OOKXV28PLcfSVtBMS09N6e+nRNb3qEl3ghI5+/Wkt1w7E9e4Ijp98NP7JOu9/XOda5d4s7yxz8sWL7oIincK8/zhve6dMEJQ8kk+x2eFCu7923303bpvargr3d3e72+ZCHo3fgrJzMPve/t+WhPda8q5PrvfBLmf88v/q9erh6/S99G8vCL+wpTzoCa327fdsvffWLyqT/3eT1//3IXd6rP/N3fQmE5Q0+8/6WrF9X/R1pJU99uTe4R8TL/H6bv1BES532/soQE4Vcb+4rd7vsWw9z++yyelE3v/WK7vGP+/erG/dU6k9PJ1wiR3e+E+HvfEDf6lfL0cRfTRahE8xR7e979NKVN6UgqeKdIqDbhHxBoyY/P/wRGp0/HqJOK3FbitxWK5PT1y613Lr5N1ne01Vix3f5Pt8XXNLvpSokEvb9sqK/x2pyw3vc/e7XMxZnR+CSlEdVlQR3JtjrLb3d4h/kZSfuEfPFV+Nd/hc0nXhzsKgpnNV2SJvliy9vdMV9IcXdGYL6ol+5d6QbGvvUOtX/68sgh6t7EyybdvVKmGml/UvZrJ9vi1dAnM5RWEWHZRbksh/a3dOqo1xMJeYwLhhv9+78uoOe9akbu7wtWJLpviHH5oIsrny21JEXa/T6JVP5fki+X7u9uSmOxvHjoz/favrBPxuZ350ghgn1WSTeklrJ6dv+pBdajsq+bw4PWkUivCypcvmcL+qRLERE8PuP+v9778l135LjnR8FcAAAWuQZogFfBZ4sUkGs4a7h79ykJf1+Xz8M+Ykh8Ee3MQWvi8PsOhJfxfGxpLE6NFu4qcI9woUfZW/8J32b+fCCi8fhk90Rx+N56bZWiDwmGgj/dd0vdLfJ9lSYXW/3bn6TfLJX1u+T95MTLHXfNmvLlfn9ezP/CvizckNR6Z+FMfnHIXmOgkv43u1CbXq35KHEdlAE2+dNKz2Q/wxkMt+I9/7du1+7Jhwjb23G79y/flEKYY2vMj77iNK7BDt/ktTr7/m4bijF4SaY7/GbbUZi6tsMw6G4O48MjIMr/2BW/uIKNifQ8OTnrXr8dIDjM3uayLuaeZpSh+X9fCUoi7TjAZ/KbrXBhixSjh+9vCtU/+FpDuj8j64338vyeoRy8d9zInp3feliSny06vZBPzCuNyX7hQk7jGUyUhmnzuyPy0yMIux6VsnNEjv7i+8umxt2IiOpvAW1dd/csj7EvezgI/47yvvNlPktsZQ1xbUrf3VFYeKGmetLejR3p4y7m2np952UfdYS7n/WMvHZlUCF6OtP+6c8I325H+EPHonkeWNJEy+KvDbJb0O46j2LifLgyJ93Rf18FF4R+Hz6GbB29wpmgWCl23eHccE8U4s0LXB/AJwm9zptJpsKdDHxULt97eYk4SOHoZb8Fla3go1hcvzghPLGigyOImYgt2W7lx3dzxEZbQuf6TXhX3/G/mkG3akD950G1fw1LBkifqxHrjXf7BbukjSIvoqE3VUWXx91Qg8WHINM4y6U6TvY9StkjMO9IbZe95E3OdTOlGqLRUut1ITdH8EpLHw61JQZAcgnuui+6yxtoZUHtdfNdpVPr6EEP1LbdYd7x1dHk+lTp8Evcg6ndrprs+19b0uEn+CexPXZbvMmlWisb2jSmfluMQnUp3AjflnP+ObaP/6D9Eym4EHp+ogvj1kz465t1dVc0drB9dy8wzlJyZ2/+mCQzNdeexkx8n0ll3mLKIw63fL9v4JTP3gqlFW/BcV9u75GdXWX7kwyt95f31FZRKX5gc+mwiQddH3hqhkCEycgWqukJODJ64T/onfZb7hHw/3Ddbs8FMmPx7oP45i+UgJ7sY6Zf8g2luHamAOKEsvO972tQ4Kef+T1fc/E93MPsH7/IUj+gr7VkZl1/R4LZB/fNHnNLmgh0RX/aVf8FPdy61ZoRRRXV9320dYfokPL+u97u/oTv2+GPv120r9F+0C7e93f/5xfXnh8I5ILBG6u3Xd671O
4qjuq+vrWONfXVVtayXfCK7wU2g337u9G+5pYZk/29sFRN2nd7710XtL9LRpRMvva3iL3u969p937WT6T/E95NCt73L+l8Vml3n+xPqyZf9o13wj4JvLw26PK8lrlwWEuHMJdk93em2T6zW85ARieSQtH4R7ujvvPuxsTdy/vvr1qpY6vWTdjfX6izPhL4sPmlyLJIpVvKXAj17ffvQ0W8V3p2be99b3uEfC8iEkw8k70jceCftNC749100ZnQM+zaxITbBQIf3vdzL5O+ET3Hct3c/5c37QKBeDrg5cL+L00uQhCTR9ZLvOtqiUIz+T2m/lJyf15JPWT2aPVZIQLaZ1y32976d1lmEPA1eK+/sXPvvL+kz5Iwk/UICCxDFaG7n5/vd7yiZx9+43VVxW9ekUrxW/J7GiTHWu6Ebd9/qjFLzdze/TGm4c4djYqEemf7iDXzv55jPWv39DYQ8fWwg5unMNd3utXtwfCPuTt3uO0PLxZhzK/qvBZe4r4cZ1P7zpCmSE+SFaTL4gydZtHqe/ExImHWSX6mu9hWnlm4rdWJ7p2gvvDctLu/Z7X/Sm0nLleSCr3fPy8dbPpxuiQSWUYldD4V8QIVfSX4qbXJlqaXJ6qVmiOLtj3vuNTVO26veT5Pr/JRzMy5ove85iVg/deSQl7hj1Tr74YxX5KnqyRBSbmZIco8FYLIAAAReQZpAFfBZ4sVy4/cN+HBHHI5L/iaWX/2wjtkOZqlkwumzu9vX4IdZiyXW7Q/e9oOCSjC2TtPhuL+Se7Ynvh7d5cfxq5v/LVe+9SxRUi/3b+CPuNtqd/fPbhXzGwwvdmfKYl/d7G4SeQNTp23LJ2IJEiXNrstqBfx4cvXosOwlPocDnZ6aB8x+sF9IOjRFNH/4U24NsFbsAYR+vP7dlTBv1VB+H2eOoqV9x1tGjYX3d91tJ7jANLAu4M+9bChQl9tby4Xsq/d3e+V5Z0zFqlLCHI3ODzSvgm1PX9Hu9/LGZ37B+N9uUOtL9CY+NxnDlO0zj8OLqPJ/XTliru8yKCP4GWPtBEpeNrLbsyqN3/l5nkaE3+KCFqzgK3Jvu/xnu7HYiYXfvOFzjr/XthMrFfcJ3gNakhUfcJ0V+dl77aF3z7hN0v4Qvve9jvf4SKY3ecR76o0/u/cFG1HRL0eAgEvv5//El9wgQcE962BK/itl+/E2vCsj+/aCfdPGxAbv97u4TXtggEZjju7vgpOnCPk+PX+O9L730Cu0BDumDZwwxlfZ/udmDRkPXRYU4yL73DMW/1o/qSkqN0JU/ZP38byHKKLBPXv8EFBi/ynIQvbBc6AZ8NqNH36OwyJ3d/6ab9xd33rqiXMIve0j1CksFjOpL/768wjLj7M7GSBZWJV/e9INVuMN6BHi4eh2IpcGeG5J0lTXpXwS5oxDmLTRTNknDO4675Mot6UI+CMRPvZ+C2GK/+d3d7iT6r9wWZVwyvlyE5d4I9L3/mMLolwVkeXbAO1xRCsVffCueVp65S+HuYTzL1TngiERurudYKy8JeUXq2j3W7xzpzFPQ72P2eCIke3/94IsI/NO02/VAgpTFbcEvDqgQ8zvMj5RJYdsc9JLZvum9mHM85m/pUVKqp0xG97v7Qy5aPz+97u9wkufBWR3d73Xbr8E0yCHYs9yy0T/s/Lc5vvvCZyRe93f4skOIs/xvf6xb9S035PbS9UumySNDNSw7zMEJK1p/P8PwSHe9qf4Iu7tCK9wViuVcV73vrfuFBLivef7p7v7sW1z1Qn1S12Pq/f2quN139krl2kvtesI+CEj7b79Ne0/Iiw23eTs/v6EPqu/v7T035C3vuveEfRH/Rn/KLe/RPilVfX15PSXrNMbI3xVe6SrkhHzHL30/y3e9asokWmZY53d+T0ndzyLL6/EKnfXrZFt+SipBtIskK9TrTd9fvKxJ+rk5P4oQ+qwKKwIr2pUi71iIR8hZf/CZNARGEieTy/r4jeXn5ffdldgmLuYNn+5J
D7T4QRBeb06Xus5mPzd+T+mnkcUasYEO5vy1T+kJhLw8w8/F9Af/U8F8EH1Z+T17rJU4oW8pabf2Xd9fIGJYbu70q9U7y/kKiydiZe71o5UCW73czOLbosIXhC00/z/d+oLrps9sm6QenC662Z71ejnTVYj0oK8/e73d3pVVavQx2TDHqkSzRE5E/5ryCYRgqgAAAQiQZpgFfBX5h2Ohbi+CAubOX0a3X+LC+LJlxLDjqL2Wex/Uvq/Fy9swrxL9eUfh8srC3hwJY5k5EvDuT/hTzSNF39rZhECT+Xb395mEysYsG/LET94Z7fR+7y/veCefLwH3Fr/JK/i6KGRBYK8AEFCcjjy2UhachZ80T93xNzyp4yv3vhDY7p73DNwu8v/yllybQqyBTzGlo0Y4vx19jekErFAYu+u6geGSzf4C1VSNRae6VEve09rboz+zxvxQOea3kVmz7YU4gnu7f34b85EGdyVlpxjvn/KUMLW20PptJWJLuhc6ZkwTvm3LCEiL7QZKaN5eUx8Sw91uPpJH/75a3Cf3u+nEXPry7Zw36wS42nlX1b3H0rqvFTxlGXhxZn+a+YQ1+JLy/u4U8EAxzBetxVzkTQKeYpOgAjq9rLz01jml631uVjLtXtyxADI1/VIhtrvlLs2KQILsOIrAQKv+0M6WX5S7fg+6HRfdyvx0Cv4IDgjv14RP+vfn6bVckmdL+MJ+JTRre+IJ8g6fy7/FZacv/lKYPFPkOduJiIJbvngEnrXOmuyxneY5ZNmvA303ScS2uDfSk/Xz6BMQbE/w+3zQylp5C/SqT9dT2zR8v63rR3tZJ5cJrbLGCnsVu7/u4lzcBe3U/Txef0N8OGL00XDCTsjv/vpfd4aa190zKqfwQeETgjPD0meA75fnc0hxjepUka//L7fuOPKGkkUGldpDBX+6cvrt4KomEyBvW1w4g4OlpCUPjhOeNp8kng6v17gpGMXYBSKva86lWcYefd+qCmJsLv09h+CMbXLpkXR8MO4uBLubq9/lqnwSz3wh4q7DSjASlf/XzAt+WtNxIvJX/d69FCe73mjCPgiGZP+3+CLTuUffX4bh+Lpat1au7/0dhAkO1P/z9XBPvjAl/kHiLadCX7F/cEJnvy70V+/vJl2Nx/IwUGzPhJw9T52WOrQl6evCWUlV+ZL34gnMu5f/fRK1k/UR/9TTh93elfEQm5D/3vr7OSEfBPl/y+v2I5unRXoSy916kvosVUdBK78q/oq7LXL9X6yS+7hHwSiJn67t7db5P6+z6X0xN77G/dXrX6XqspASXf6EfBN4/TXd2/sEJnvrf2JG3d7T9P5JCO/1RTgy+i91Vif4JBBbhjFV6L3R4I+dVJd2LRDu+m/kk/CRfv3IKe7fwVne70nduf2/EC7ve/v9Zf3dOqE/J6r8EgjEQjlCj+/wXb3fS1+OLl17o73dO2hOmv+I9jf5sFneof+JtmEgHV5HPhSseSTMfneE/Jh61mlyIt97+UERsueqnKJu79Sd3usr8T9apom9wu6zMtV6pc3LnqW3m26yLdCNBETNupfucp3RLQom58fvdLhK5JvYrd4Y8EV29Ok/Ncblf+6Ov5Llb2usFcAAAP4QZqAFfBYX/3MKcKvtrymy916lSF/BJ47ImMv/uCDKHy0eLR9QM192ludUjeK8+9xZwlFxt7NOc+X99QtjpGHm54YeBWT2nb8cix+X033GzalTwn8EVvSmnBfoyWCL6Z2SM9fl/23Fkf1T+JENPXuGTsJxifi9MuHr/y96tD+je8+SkT07yd54YV8Ehrm4n12kxr3GY/OVcSEG93+vIFy5uplP7SetBNdyPczaG4us/audFW7gty5btmKaKn8ZCvjfoOjyA0KxStp7gvGJnR7lQXlTlaqsJlOJTXcBH7kb6/tS8F2fZKJHeTc6eoKcjn3l79mNH6dVvl5aZf3dy3DjbAyJr6/hPzEk29/Y0k0Ebpk4CrtcB4++HfYlUWK3XyKS
9n/G7IlYNlTwclitbw9mnszG70a60W5h8/2755RNfeavthCWXMCyHGHd2Ocf+CUpSZS+ErTHbs86bvOwSbmQO06PqyXDG5BZbQ7g6nm0/r3X9YKKsr7T3L5W1uWCHjeRPxfhtJH0YMYHr42iwwluVsXaodgOZDRxcQCAztub29F8cmpCTc3nzTRU4fu9vj03hP5761omWV6uf/VdBC78vu7u4S8E4p28nb6y/9Nh4mTe74BTy6/f14btx/bBTnFIbuND8p3/Tvv1Sd8n1WXTgt4aQxnMDi6Am17f/Z+IEucP5rRFT+4LiFDpd0t+9asTZXMg9HglIZdoOlzjL/+q0gwTjJN37XWWnHu+CLLWLxeSYe+4S8EYq763+Cre5VwfLXBPda3WWCDjLpyOmnnh+EPj9f+PVV3eNYUJwQ4EWB/ieQdDcOa3+4gkuIibl2BY/VEw47sN4TFnpMe3cwSk99/cEQqau50e5/fVS6E6V0/IUj31Zu4520t02Mly7iv7qIF8N0xyCGkHqDovULRhdHuFC6ZVd7uX73d2hPxIjKxk9b1yXv2Lbu+YqqPR+6tkKP76tddfXtPwQ+Xt3QIZLiOqcqf/RSK8IeCLy/X4szj1Ho/P7/CAm772z69vX/Y0vVdifiPX1qw1yckI9gmNbt5ffXzi6b5t+j+/t+/r6L7E0JerF+0l6nrrUI9AmmZdCbn8MpEZTvk7+9ddfVq/fXhe9+SwQb+1eNr9dZfERZU5Cvff5ru8I+CUlbT8KtO9PzwUb3e99Vvt6FdriPd7dJ/J6P7ErvNxpO7T6BMXd7u6KdrjMJeMl2/JInTe+RIlOo0E5D5e+utfElOIcT91k+T230K9fk9ubIUfpvJHEKF7kH7e6stdrt5tQsT6q8Y6qi9NCf+jluvJ6pNEfITuWFURmTt6/ThfJKZ7urE6L/kiOXyeie1hf0JSJZLkjJCi+aT5C54wWQAAAO7QZqgFfBX4YHaQ24zU68c7/iyh3NHScVkJN33FkJhrYRs2f825Ub2/F70uHM58FEt3meWamSGPCBOO9UB4Qyzu9qS8f6yP3YN/spf7d9buFL34bk/c9bzRYN37inTOX7/CPd7joGzlwEjfzd3oBO/wmV4bQwWu/Lpalgj054koV8xqTQJPD+6l/d7G5Nci/J9hYVvsJ/NYEvjkqi5KeDx53l58iHp/3irvZBAbRVbMOITiUY8WZkn7GUruP7d8swn7R2H4Z66SkKzZ7j2VRTR2WMhPpj73BnTGA1tfZ2e4dgryfbaYnbqdPcQVhqeK55Xrgp0ZkDishMD+EvBM+ZzpcjfjNEvPuVi9VPX4rn9z51W0W7js719iSmQbZ/lISf5bDljCfgsCEidiepSn/ytTxef/fhTv86Bv96xqu3q4cT3Op1dGniT30fvoExaFuY0QWttMtk+kres3HZnk9JLL3CfmvK2Vq9BbCenSBN+x+uHuV/J+lqyWbjomXOCfgnp1O53tLCz2/sFRkM1ciB33PABAG7L9duOXrL721jeHqBz+6WyD6RIz7b3qL/6Mg/tHOf09pKe/VhSH2GPYqZBjYYTDj854PwFd5NaOmELZVP7vysaoTyIeJIPhFpnuNNcvwl+sXiXc2lha70m41hUiInv/uQfuiv5b31QJhbnve9FfY2KpNNOt/yGcwkCB922LSfbeLVtDDHXd6zZ0BDJ44ed9910+1vCnQPzBhCLi/pigfk3etQNpugHcBD6a/fhDfC67f6TG0noO5tHTr2JP56RDeC8xNIixHT4oYdEP/L68S1r+Ei/+XXYKxR8qkTnlPLpaEjt7ZfODTjuG96QuH394JBHD8nDvcx93VtOeT9NrizZcG8WPD8lnlEI779rXKpt7hJ/iScsdN/qzy+8EZ3v7on8KXd0D2dm3vd3s179H5PTS/wREx2T7vZOMzW/BEWndupCi8Zywh5QkTZf5YJRL3vfhJXrV7rui/zeXhHyEbdN5fJ+NN3TJ1djdbXXC
fnEu+imet/ijU6dN95MvYn2/eYXe8v/qbd+/omrFoJGd9424a9NbXZIS8E8z+MmGxwq3PVlgpK7l9t3ufuc9t/fa1r3++vJ00uxO795hF2I/7m7veqlPefhGsmEu529fv0wXG9Ny973fOCUj3d78N9VaXJqxPtcn3+vt+gTFuNiwLB9g/z9t1cxb5f0oVf5T1ky8Rl17T3vv7+8WV39tVkkIfy+GHt/TXZPjv6phvqob8l1/s45KngsgAAAA2RBmsAV8Fj9xYqTC4rXsV/KTHemvdUw/L3TDHgkJDfg6uvf2lNe4R0RYZghgiROqW+ZvyvrhmV3vjoLrqXco+PtDvqlXiTeXl/sraFXcPO5cGkfcXj4UOjxe7QvgJffPtd6Abk/S71/KVJ5b0WJ0y+FkqlJ9eT5OM54V8xtQT7xf47ILk1IzcY3OUk5bfw78b9ITPctKDA/uV76FuVk2/GcePYIcNNQ9w7cSVO0iHi6l5UO/oLV3mrj8eR77nBHa48e77rt7nDa57pptpzCz9pueCgoTct3fxyuxTdFjKbYrkXQz5u/VpUuRC977lFq+zUZ2Lrw2XIH3XHKfhPxgyGdxQXv8DsV6yvDO/fJQSYO72Q7YqI7WGXVNcfjQXttjJ5EuDsvbbAx5XJuhWl+a9k/MITm+s+Yvi9xnHGuUFp9p8i5CPjIeHRWCkpgzTpjcw4eilv3AblbJ+u7qP3t3viUb0u74f3ddJbvkn17SdSTl1dPqhvCXkpXlGit26SKmw4y1/n33X8MuS5PSSTWsEd70/uO54Ht8blhvCb9oeOs3d/d/cwW6t2hn4I4VWzXwm3yfr5Xgpp4YS/OPTfYZvi4aScu/ZPvc/lIJx099Wronet+r/gqJtIw1OOGof1+kT4fiLmLlByhxQb7CfguLI1bn37f2C26IS8x/HRTfS7rcb8/NGhzReGcGdo6Rnj9LpIdDq7ol+18cSsa7OTI/xf092/DbfMHnkcWjsFIl5QvMN3n2GVDg6rVN4YMGscQ+UNvVi89P1+Upc77Hy0N7ye6pZFhO7/DM+d74QMUFvfdzq3OLsKoEkKqioEYlbF8lS8TEvNN98vCJf3iSSCr3vIrRXMn0l6uJ5gvbh9J/9nZe4/7eCO99dfuCPe6f1d5kfV3YlUqc8l3wh5Mv7/KIdv7sTe+8l37TNk6u65L+0tqvCHkx2n9DiWdKx1h/3xukfl/Lyx297vit+mTquxJbr60JaqOuqBFm3rohVfaSogIbvfoR7ZJn3dWtV4IS7v3T6yVV/J6tG7svukftvk7S5IR9CL0/PV7pNCXuxfxFe/X/k6Uul6FMp99Jr6etXhfslU+uvrqxfr3r9bPhHwfId5Mnkwst/J/TJiOUkz+T0/7JFnuk7v7+vVKSbd9eSJ3e+4YvgwgAAADikGa4BXwWeLFLKSktSP+5iZyxxpF/2stprDJf/cJEWiZyiw6SkeKS/vso0sxIpczbA5XqLTCG9fWcsp6J7KvDIb/KLfqNf/jZyNEM430LY33bgCdq7fDr3BWuJl7i7+HR3FynZ5jDbK3Z1b7PhvB5zkv+JTl7vXpFvf8ISwzyl1thmZwS9xVyyvlhtqPqK8KeCQ1I+pq2ZMv924zeEy4+QIV/8bUtqwZf3CrnAtZUEdiBGbWfksZrZDd/5Pi3Y65PbUpSroZfFoe+WGy8eLineELgy+vn19jYI//WoUsec3cInnoRu/8Ymv3D96hmdd/9N24MChu937FxovOXi//L4yy+SFMzSnLa/c9Td/btHM5S6U6w5bIgIW19uqy+SlebneQev3E3e5f0L2jcVmTv/8pZXnSsBQTf4LAk7WHakdcqlv75faq8E2/vUHc7ftXX5j5+XSbapVXeCu+KijYkg6kq6J7Hlbs6FXveXwoX7rbGmPhGXU4Dy9o6fC66x8ATLcV/rO+OkGU/NssKSPUbsPV2h3JFL7vK6H+e6kvLXldv/uMvXKItMCR/6/f+T1X3wQTLQTPH
bwh/lmY3KHQRPWpRc2uf24I7gy5qiVX/8Ep0pu8F3SaYiN7hMgaSTWlf3kKvW39H9e0tQp5rKnaOmXu5i7U+7OqGkyCw6rR2Z6fHSjLvc36rssZOjtjRS+T00v7BLbed6BJYTIkn+0lpBMbLe98n73iSZfLwk/arwgEjL5k64WKESO0f+TqHqRFd7lxb/o79i3Xi+de76sSgQyy9jJ7TX9mM8iEdTGvq4S8t1qvZY+gQ31T3kRepa+97FEvd79H9vk9q/Gfs8xXd32kvQl2LFbm6eN76sFolK9N9q3mo9e7fvJvfRF5SQls/yzVfbm3d9v2/yq0JeCbe3P4aVlffWUSf0z/6EdXghLu9dj6t1+T35Pbb9VKZ5XWvJIW942F4viyEPCoYonvXPCz8vlnr9FBHu4aegu5evsTR6ql7vtI2ZNyAbMziqoyWL2aEfOVY73+/sPkbhl1nhG+0+WNu/4cX7J3qvFEPjby+/qUSF3EHek/6XsnomqI9tyObN37TNSHEFc50Oyzm/cP3Ay+09Ls1jRinwnksUtbpes296TywRHyoTSKm8kR6fWb7pPHZf3hm3lp+1kJhXyGXVeXbvy+hPSRZPrWRsWt/4ayUQ6SfD3xUAAABEJBmwAV8FngoFGln8d0fFYrzGxrTDPmyCGE59m+M8M7umNGXJagvb/AIPmZqypoGVddXvd/jL9Q5BF8fMU/BoPln6XcdBO/xWFh/fj5vxpl/uEeMnhrbtwI1X4ftL/L6vOxZM6IwHKboFLaf2JhM673svvNJLr3CHjMraO98v/0C7liad7iu/L4eXAFV7gtHFNeZSGnWit7xa3cQXeH/d/cRAT+sL3dravOrdeqY6L96y4V8KEtFxzoDVX/y1LLGThl2qnGrodNJtiSgrljPl7WE344sC+f8Y2EuI62k7hMo/l3KGr9UKxpn8ssn6t8TRiu6YTczVhK8j320ekm6CezvdIo1sTBHu86dHQLNz/3CPz1zNZJWlVJEaYKDWg97e8qatpre+Xd4TflgnEP3cNv7tAr/hL5VwYz8GrcN9q5EMp4pQ7nXGRfdgs5+xRfcG8n0/Y3ho+VM6V5y5f/yJEyuy8nt64q4ItApQ+VGwl8nf+wS2x26gJ/edEdoV15MJepLf2KuuFRdvfvrNhx70niYwhe7Xc9hejQYBzd+p3RuF/0Yeiwhirysnpb5NFO9XXuPr6oEJjpwzus9tX0fu9dd+7UWgVkKPsjBHxs73KFyFvvyyD8PUzTzpIEh5n+oQ83hx70JNk2Xsex3VWS99NJyBDyry7fhl06a21f6BcXd7yu1tJ36+rFb3e77LyfdCvlQSzlDEXte/eyZaFEnj8vtbpCD7vd/61vkkNe5eEe2Ix+jWfgrPL+XLuypv02r+pTkf9Ua93povIQ7BwnfX02rjaq/bQJLlb920ELu73fu9v9F/eVOEPIXhp6/bGkOwHn+2ty9MtkZZy9uNiketvlWL/ukcwkFGt7gl+lt/Pcr1VSiT5/RZ81hCMq6alldV3feT0kpdPDG6T3LCyqHzdPpK6HXDaSN/d3e77xPd7v0bp80Zuc7u+G5aV277PeEN3d3e7u3tIJXZvxzW9FhLe+79II73Ivd7u9bpwh4JiPXeRfLbqVxRnwQ+/D/3N7yMqKNzR20XkM7+kwlvfd6TcWlrt1hutWta4LN73hBpiXcFNb9P9dbLd/wlvd75f+iEH73e3CjY+EfBOYuxoTG7QR/+UwuQ3CywhT3FZce8V7Sl+/TooU6b8gl399WNRb37TBHO68V26qhRHvEvj9B6H7Xst9v8tXTcI+GizU6XTr47v36gqNc4rzNuolhl7J6614oj3u33dkKJEp0rvL3dl3vtaL/4heSS8xtK/x0cE9Ei9QKf10RLv9Xqdh7rZD+e4aI8JnLh5G7n5fhahMvFfRS9093fqUps/foTNu/RoISF947V7KpNwr54CxbL0d/pZ8eR5ZFubXlWq010L275cIa0Igh
OfJS9OtiemsRIW7+IiyXhVueHHuaOufb7uK9wx6pF91Dn4O+/4ifQu5Jet/iMZ7Nt+hF1JL+IjubN4/j+CuAAAGREGbIBXwWeKFc2LmNe5SF2Pki73zczQ3H814btX6l80wwX/3CRLjOmrlulL+94KyzI385Y/cNt78otOfh6ZBOgtNndgkbEoBpqqdnrnP2gQeq15q3bls2Qu9B9cv5e4TEmvY8yJF3Nur17lIcx716ZS6bhTwuI4b0ObXshnZmz1Hm8qeEXsXDuAV7Y3CTqbdTF5fb7AWviTzf878V3f7uqbzGSTc7iHNsv5JxGdNkaGj5WUhLaF8b7aLbsdh3oN5OG3MNvBF50zDSPYvtja92XlfO+Ntn37DcVkYNIZxtQsbut4xE/TZ7jeGmlXhL3/tfTIbK/zuV9Rky4f9NhAsJ/ZKMT/y2Bnm75x8yvyfaq7uOhO5bC+8ILRwg5NDAP6SLcTysFw8aXSXgk4fXMaV2m5Uby3vH3s+5x/dK2yIhQkV585eN2fl4Tdy2GE/BYENpHW179JaJnfxwF1Icf2y/Vbh2W6WFtTLlLGgV3AXvZk6u/YeP9+T27EvtwgV6nlnvmB5C95tEwmw+b+xMVu/KF0Uv+/l/3yyrhI5Jt0rv5On8IXM+CFv842JJEzGC+J/2Vuvlwn5KbTTdbtjYhgd1kZn2CoqrpcW034XHLf2nVmATXpdtrDvd3v+VZZdp/SR2o0mBFufO9Ub978wlRXM7n5O/qAySv2L6+CLcULjbvGcXBAQt919Q53NdruNvP5wxLMJsK+1EHcUCFt1b0Efz5KHoN7ZlW36NfIf+ylDkfFqY+ycl92uNEjkTiE6ue9xP3UxQv+3HfnTDBW2/9i3/SZLQeNKLZx6wkNML2URMnXeYl0tf9i4k8W/CFwf0P1qjxhp/G+IyMiK41b28xW9mfB+3SS42LvDN3eOTpb7CgRipoRXh8aBkuyAvgIXa14swyOvry6m8dQfaWkNoaxqmyFbUYH5ZKqfJf6/S7D73HvmJ6CYzCw7xqfrbZt7XCPgoh6tthHq68z6G3t7oxWh6DQy7Yd+/dmL/3TuC0hbdEBXxJyHg2SvyZCeLDaaPxuoBDkXdejm/P9ZoW7up8W8Am6+N9WMBf5ucfZGC+5z9KzFd+/ibHIbpmQ2/6m7J5I9uJKmnc69tvUSwa877/dfokFhMOfvc4/b2VLB7je7hijve12kFigeQWl8I9ukrdVkPKN8M0b+hMOFS1X8ax8sFO73fhuJg+XZ5pZ+qNNChMIOKZ2FtCwX+1RvMtJx0ujF4xtJ50CiHoP5zqhzzvfqyWdx1eEfBWYnpsdak39z1CHl+F9WXNj2LYJtBcyOyaGOaV9m7Swl6T88dvioTLqw77u/seR2JvuH7SQt31jTzC90qBefZ5PhMsepuXZwatM1XbbbLusFJILuZt22R/JvsdYQLck93vm3a5oojucs4LO0GvdL0DC5H+kht5dCS+blXd9Q1nxn4zKr/xxbvnzSu+ixdp2t72sRIC695Nd6fL9dZRJZTJx/gnHRy7czHad/woRfTfvGKOy25dUU43l7gqE7u4hzvO3bT54IbW7jbRShzWfXglJd33eL6BNIW+y3d28uxPrBLd+8uRfYISPeF9eu68Sd37ny9UEbvd3fd3tt0QZd3fd33Y969yw2rXwi/IzCHvr6RXvqr+ul+OLJ0+933+9ronu7/hG+73d7wj4Wy7I2OnCQs/snipX/fysz7d5hmhZ03rmI7/p5/7GsFt33JbOzJ24q7vfemsQ+/o9E78Fe7vjCPww4U+3+OEu7y/uyeE/DxrV868JbarexsmBYLDt9ZoMqy/O7yI2t+4KD3oqTpt2LL3iBM7X3vrGJwRXe6dfH733d96WlBH3c6dfv9Ah2+nXoEhrw4CB+lvsYV3vw3T2MP3cvd73pIU4e2n4nCPqkG/xxqjqousqibnO+HfZOtqhxRurvtB6ew98n3rjTxJT5vl+j+
el7L2JXkf3rTSCAiY4VMLbH8IG4N7odz27+X+SuE/IUdwgy39IIea8sebK8okIGFd/F1d/jhAKxYl3vcummY5n1Kuy68hjkZ9Na0kWpStb6XsXCZT2r6Ut31TNxkv5PSp/mBF3IXSqX03XhfsE/c9mrsiSO36pFJ7f34kqZy21fdGqelX5tJtLkhm76YXST/WnRCcZ8GK/JJJrzW8kQU8xj/dEbhHXUPfGQAAAF5UGbQBXwWeERWW9KXG4Sl/xZDAo4zSmJBx7+Xmv81KXIY8xOG8XL/9i8PQ/ET75dyB0/UVhO1jWCEdlL+740rw98x8f49HqViLd1h72kbuPns6Pl+8tw9xmrczwDkXbDSX2kaT/ct56O6wk3v6rPBWalu6WwNuOlj4+JYtHRYQPMwykjfl+vcJZ98scnpKX6hnlnivbb/4QpValw/8vhXzG41ES/u+NyASCH4HpQS2r6Qe1aACPx5JuzldgYe4TBQ23200XdX1D/Y/xveu/96ZfveYKAlfORx4byP/f4yZccktuvkh9LMKPafPL8OZpw1Kt8gXbrLyelpOuEy8/aPsk63xU/Pgh/y/YmJ3u+/xheeAe9/kfaIzhPzGjtJpZGX+NJLGBG0l/uEikM/bi2KUl2z+x1WZPu9uTd/zSecfeY9zwsrkbYf7G3KYsTVelzsPWfZKjQ03fXCRpUrYkS7xBa9t/h/z/dCVhL3dTrOkj/vpsaWsypzdw/WXF5DDNN6r2PrwwHQVQrMb7yWkJMr/lSRLNFwrUiRz1xaRO3yfS5XtjbVlfluIu4D3L/5dLdu9KA7hnSDD7n7WsFfSOxXsnSDcnywNSdfBXvPF7x6OLje55dKXgouUeetBDOvOnX1hfe+odi/2E/yV+mmqG46EQX2R/l3T8sLmXFZcBVUZ8tL8+wEQpVuvl1dgwvlJeGm6CduftQWXl59dEEl6SGe3+Uu7SIzL2tOm3ksdvcnz34oS8gjdu/wpownfoXZnu25VLF+w+Y+dEkwdvaBBc2H1wncBGq0aalW4M3Pv1U5+Yhabvnn+8M3W6bMPZs/jbSI78CBf++H5di/dvHWg1ISVh4/V931XrBctFgDh9JW23bVE3UbEfpZseZYPVQcl3ZT/JYfub/iC99w6i4HAW/3V1/5H7XuNINFLzizXX73fNV/l6f3ElPnvIXZ9hexcl39/kjZVwCffQ3h2mgOtpJL5FvSZF+gN8dX9pTPIauy0zFXsKYcuGW4/402Or7fVQ8WQfn+D6znmYPP3+CGprWw/jE3Mt/Gwk1a13eJP5u0aJ/I3hCPUiv3m/6B6Pc755v/3j9Jr/J9JJedMTz3rliCz4+EfQr+9bBFdoZeN/sMbw/b+et78c7/+FDTov78sbuv7kEZnRC/65/f5Rlbgcvfc2XuC0SUffGZWrvXSd/wTbu+p1eFP1BJePov29y1/k93G9yzbvrckKEod6Snr/CT75gG6sfpKqY64/0CbunmdlyaC9kt9b5S9bZIJz3um9OUJ+CQyWGnn9y3eDt/iDvftORbqCIQnHu/b+RCdn72z/CPojflIycvf7/BId7313q+/dld230V5P6fysVPRllJX/e5mrWi6zwjszi+jve/WCK+5i62QvsDnWJpXmEr71cUV3e+6fbwiu8FJnRqw6W9Ne5du3+Mn8/vduX9v6E+mum+vqiHP5w1tczyelX/1u+jk9Ut+i3kd7hG77yhq761NvevGYQ8GHjdFrX8RUZf/xRnP3uif5WLTd+sxHf1id33vzIKF3fd93u7pV7BRve7uQTdj4Tvd33tJ/J68giWoJBD3DKSxvpfBXve08Lo4cliDfXakm3pNBEXu933vSRSShPdK997i1CPgnMfrHLhJthdKvhEzt96q1yeu8hLYUPLhYlt94tyw1/aWmu4SEuMV97v3DV71Pi/19hLMv3d9KJl7v572X3i93d77p6F5+f3u/sxuC+Jz6GZj79uX3P9Pu8n11MnQIy3n6eEfPX4
Zqv8aYMvbAlbtrz3Whp63NT9ZRLYJyPwbq95/30MEzJtLOgc2c4+f/opqq+8t0Ttbzb1VP0kDAQM8RHzcBiT0XXd79Wa/yeu66fTUKZJNR1bvOyewmJFjXufbSk2T7xDbLXolkpp5PdLkyfUSVt9IltfUSZ3Ku0HPfXZITj5/7yb8jhdLWEiPpUb90kSU/G/0v+pC3Sf4JyOWny4lXxW9xW+GPVIlmiN+Ll31+93LnJ3dCxDuuCuAAABSlBm2AV8Fz3zBDHSThnz0VeHGm/4IMaDQhHy9rG7IId4FaLnzZWb/mm6C6NacW6OZ6fy/27YrOUvK/7gsvlx57hLwcszl/2n1bngoJzChcygQi4CC05bs9Tv6TcvJ+/rT3vL/l8LP7GiFDj1shK/lL85CRQ5Q6GArrQG345K0ySH9MiXruLbEraz7CShl3WB7/+2NKkZ72HbEP43B2MF60a8OxFnPI0RHGthDrpMP/H9ZfafsZBHL7M2cVCuOIf5ptAhO3HvOVHyDrXdqbewlosEOzJx4rGqXd0Euh9F6aXUWjJ+tvWpC19Zf8tv8p+eQTL/7Y0VaNLhHyeIX2UeFHjhsNpISHQePZbFb/TWtbtDdZZIquFYq8Il6Q1t3+vbv3CtpB2Pk4XD8Fr2lbfXEay+13hArEb4RKPEEHnDoS7bvT7I+x+ifBfURPY3t/JJOi899aZfdXrtwoWyiyL3MRIW4S5a93uEXjobPOJXa6jd5zbjMRe9reSkScSZ8avWBLgL/DbtYV+o2jUPuc4nswQ/Wbs+91HTitwi2y/veg9OdLhYQo/ce5c4nTkrttm3rI7G1VffPnPBLtwQ/Vom3j/Pc+pFk1C9x4iF4cvMMpPGpF3kV8XLrXBJ4ei7pU3+JKZFb7xwl4oQ99Im13+M4ZEWfyxn7k7iVtDtCrTsD9aeCCEXerG9AHivkAGI92RkYp2C42t+gP9C1JV9tP1HvONGNXnr/e+NklMgjrS0G1s5vx8RwaO984+7XUBG1M2cLsrAz54D1gTv/z+qx2LnHfrfEFtWe29fv/m93gkH7+tfjyTkU7vDDuu/vwTv/fThsr4G8flnp1jPf+vqKvaMDWr72sFRJWJTdXkbvJDd3MCENYJ48vxtoqHZYK9BoAoP/n/xwHTIhb4JW+1siW7zJ8rFwT3a+3LRst+w/PI+kmnDsAMrp2/vduFlc28Fo3ET0O8sbmsC4336j5TnWP/qEvRKy/2vk9VK68uOvjSMVk9UjtvwmaS3DYHx0rtA2V9dpAjOH4j2gvj21ViavtLcExM6+Oh4JZI0snH3WUrzBqrFxOHLzt4YcPTje+bqmfzdKo/w4JIoel5mT3RsHn5VH8n1RPuWZGBpODQXsPR/b3rUJdgkmR3VjJ6Sb3Z4Jeubvce300fQRvvuU9jsC+qfJ+lk9mLLD1ojnX1gnyjqXKF791YgmzDbwMgbJ3tKlhN+4J93e98R6QJS3u97dav71uQsq9AyamJKtf0Xe/0VnRU97+gll797/fP4QL+/hkxn7991f7fP/TKXd7/Md3SfY115IexnN2NW6z0RyqpXbVN3vcI+Cbe8rFOX4K8/ve7zb1rpoEZ3kQ0o70Q6d+6UtlGfby2X6Vd8vfqJz/e/RfWC413uY45r56K1aZUCiGXu9MytmelSF1pJzpsWJufuL3d+RkvfyRxHja7e9yoIR8Lky/Ha7AzDyQuWrzDiXtic/Ll3ftQgd993vcq9/fXkXSZiSoIS/0T12e9wl4dMULxnvKybMfGTGQf4cUNwmbfgnIXu+963b5RLu+T3/NUEJZsy1O8X191S6bFq8n6qrREHTTB8An9/a7QTHs/Tpb1xfGyux109Mgcc9/fqJ7l77v7YskZ/CqxJdGTJUVhKVhWnd/kXYvp/KVK3tuQi0uMKUh9/JZbDHp3C3iBGq8u+3d/SpKjp1fk+Mjizz6u3cl9qKjkiHRRq8SgntXENH94ZOFbTzXx/vhf0ITrkTo12T11J
8QWrVGfvgsgIRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0t
LwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAF
FABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwAAAVpQZuAFfBYvcLCufQwhiMlSQ56/hd/vykzaYz8X3fSaDBf/cIkY5YY3KCPz0UVZpWWTzmjXFZf3txpYeZoKLBA9dbj3P/7HWVx33Kk2wy4v5PbTbfKgjlq00GvgeL2tLik3vQLZcsE5nu8NEPsKAyWK06b0yxJ2bzXd2Svx/H2zPRr0iSry1GsVnwj5Z3caIZr7SCvmMTUgTfq+3qkv7vY3xvATCzce90+Ep/QphRiVWxUEz2jV8cn7VSO7bd9PuEnzn8v/uMnu+PffY6HEUvbHLYR+vxsKvHHuInbCbWGEGfqo5Y2B9gOYvcpeqaUx/7vbGFgivV/vYH8N3+cMJVFsHrzOe5Pu8/sJVztOeaOSHRL5Ppv3UKXq9IEubceAe/g+2ijfsS4+UDhVlUg/e4IzZfD/
u8jqXsfdp+5de31eWM+PaO+YOd3dTctBd7WCYrv74yCoL0hR/hEcWz3Hf+d72W3l+/cKaFpvfnDR1tZObwzTs6cYek435bSK3GFMG7yCz3YBDbwSaM9X71hLDqefu4Q+fuqT9JrpsJlXpju9w+lvi0WETXDK8i9VR4V1MZ6U3lv5b3bCXl06y/+VjJmRG+sdm9ujFk0+Q7e26porBASUPhy5eAHPbeU/dkb5nZNv+Ro7TP+4SculRfpwjlhKyXnEYIvC3kH2zk+ixc+5k+kiJt6GiftHJFX4+HWrQyfZd3r83b6VsiBUSAdlVsJcKvuWtIjxW7l7BdKq/LwkcTK7UssJnbfu+8lxVzlvd9OMM5Q1hqk1Oi3z2H3IL168tWdwfJChLIHx8uQP+59lZsQ5PVoNl725GwSAUpxSY/wT5QxggdvAf75+ipnK6T/J+vrlK93CRPrr7BEKdt+wpcsEJFH2iNgb2NNnuFJHNgX111/QcYWDBcPAnPi+17BL5BVnS1wdeQUiDxS7dX/VvwW93cij1us8pTD0oYL7PXsnvXuWCrH5/m+dd9/xZHzLzBkEfu/+jwW3d4cSvuHuTOW0n/4QO7733P8I9AjNFWiN5Zfx2QtFiZ+gXEmlGVPHzt9tAoK8oLezu/eTe+3BESxuVRB+JOnvkMnYl/30RjJ9/9FOZHdlb7gtJshud5X67FoEJrLGSy7bWoTE273vujPBPd979diUCXe+9x2hHxIg/rP47uf32coKD3P4y5u9XeT2095/8El3d+1rr2X9fXvUER3fTLuPES8irfdyhb6gi3vLJ7VfuY977Ognu+yfbZqhHL3tJ7yyuEfBMIuf5or3FeWLuXiu3u9Ovk+nHSxtowvdr2jd0eW76oTXul6cEed//pJe0mpX0kCTLv2msmEfEl4cdFIplv5DgkI4f6X51dlMLIJEj0Tun7vtwWU39zIbtuL7rD0iEv7ZW92d/pkvr3G933PDbavdKi+aP+4q+aIbXa8/1ll/7E8npO64qCI2VikfNG3uf3vjthcHnboyhFTmNB7PXye103qJEvefvveQsgIubafL91kIYhfwj4JyH8ZO9bR/xLDKntnQu93ef6dU2d7vSWZGu/q3e77EvrLfesISylP/L4f1mqW324I93ot6cUId+Pc92tJi9Eie78v3MnlOxvhHyYJm+3+IEP+WNSu9FKKIXuX24rvXkgkE3en7FtFNd110vN9fX8YaswFMg7HygdcR+3+/JhPyFYQP9js4l3o2GT038VJ3jr1YnrPJYvd6/d5c/HeMitvb223yetFIRXMIz3mjd/ufZtmOFi/5iOJM79UvNGZNuN47vusuENU1iYIjnvp1oR3lkgnK2++4AC1llgg3eVhvZXd0WL28m6ZBxG2mnsi0sKPX+Szhih3C/sQ7/xHkiQjf+IvkL3tAugAAAR4QZugFfBX5h0N6l/iy6q6Ri4QcXb/iyLG5K8aOuslwUd3cuPisZf/cvMfY9cL+YkxuBA9dNL/7hHmRHgU09uDqCD9fSNmL/X4wvLZCpPjWZaSgx8z7XHN19gn9w06wN6s6p+Pljztr6Fm4dg3CQtnyTBe2KO7O6uUrL/u+Xy3JUNcuXP0lLcv/Tvsw7ufyxnumyCnoxk37jc7oScHbAT9zOlO5w1/gSey9bdwdUqxy25t0LV+uw1SXvQCnTi7hrDOZ4xwQvA/+/sO5PxniJu/f8F/zK2PDy+reIDjqhUq7T8v/eNhF3JdgQXzilx3xKn29zL/b4//05fp3x5bQditGUbu+qV/haJd1U7StLq+HIOC9/idUf3FwmxeSnSUbI55yb8Efdzg9xeXHvSK6tuieX19m3J+vyld3NoTftgqHUUZzFBuWMLKYCrQtlsS4lfxvvgxRhWGGGIvJ9/Xicbr+hIv28f/r3lj/7gj3e78pcZfu6Lqj+qBYQI+SpcZ08IeWgeLl+0HXdnUKP7BVgo9s2rlu+5dvsa22gXkwolLAVtdw
QWZy5S+B+p8X8eq9FYLOmV+4aXGQ0lzmCo4YWIXEPh0TwAsd0qc1d/yIwDqu+/B54SyTT/8JknXyg7hyJ/aR5YmPNP9OkVH859dsu/Xl/jyRYpVRAuD4MUsXpSpX43sI1iLGfWCsm/Zu2buZGrQZrcIfia19reFIJzScR7wze5MW8GamZBvgr2db64R8LdtpUo+vuPaf3+CIrlXfMJh1glN2d4Y7X4ZEtfY3k4zNRUx+B2+03SqotXEoCPaiq7qQYudtIsoy4/SeMA67x/C3Zh3nCAkdf7+k/e8ayoEdBBaejphOzpwWcd6xoib/baXYWqHRa7ktxY3dHlaa6Nn/UEZTTkHn1T+gVXDVp+3IF+8xdsn0t74ICYSXDG3sC+3Aj9i9m8M3d/VkiUFt1RfMKsavxfXV7H/9hM97yR7/CchvckOES/++T+vUXBQbmif7fLVVYIIZotha0+TotPuqeH/cFO396Sn7vbXTVOuut+PtF+/qtb9Skd/6Py/S9vky73rb4Q8IiM7ckud/glPJG93t+Xd9dTPd6L/umJK7vffX1iO7vuvRDtJqvflOT04Q7Eir35/f4+973vfesqLUhPSr10WLrptj0pdp+973k9ttL8vLLJ7v1+X2/y6dwj0Xe9+oJyXrz+qeXaLB967/yleVhv4iqfe60tfYsQEuXy5jtWtd98I+CcoZWjveVJs36/YQ7pu7uXd+kzlj1Y321r11/vIR9677v8Jkvdsdjpq0yPZd7y+unlPne4Sf2IEOvSLnfys17+hIm97b65PJ+kxM1k5Pr17ZWt9i/v0/WCfu93lbqh5H0ZA3+89L+SFF1iCp6vvyTGvfr+xu76L7yke/Xq86CR0793pPxRr3e+9bCMl/Py0z4/evCvj72E65dJBq1PqLIkkYe1WvJLc1uW/ghPtkxItPqlUxU0r8kVmcOZfUm1pk+TDSxvZjXHffEWU3SwWQAAABI9Bm8AV8FnixVxl8bMUR5a/KR+683LkMl/+wiSwo6RmmQmE3upN+b8578y3cfD739ykd7+4K9zgzuc24ZQfLtP7h/xnK8boaPicqZB+lbIgIlO2Huj/hMqTxkTGU6+i3mFr15YJ9zAvu8wvwnKLovn4V8EhpfAQ/5J8VNe4UybHYQAGj6X72Up6K5j90LltrOdgowv3yM/91a6x1r1ldqdbA+gP7jOeVdRErPK/c4a/MPw5tdullfUv/uNsYIv+gWrGz3fuXfaF2g7qODdSOnVr+jxxTCaFvy0/2e/fG1Sbun1T47p0rXsl9pZI+ixclU+efST4LZa/c/FqUvzZ2oO7y/7KVVN3yCXmER3E5hf4wktYetUELn2PG+9/X43S2Xp3ZL4wt3s5RkKu2lCL75I73bBDY289BKfCkj9+Bd+p3/cGo4HmQ/LYg5BvWT20mtaGFD/DRHskssfaoOi3EZiS1EeS0V7jcT1hKW+eoawL7QbSW4iCRsbr/n+k7PLwL94U+pc69Hs8pXlMp230boQ6DQaaUkIiHch7u32Rfq/d4if7XaZSShIoYOT+SsJk/d9XGGcOpMn7EMA+Mmm5N20d9tPjSXd0Vuk+XYO4Arp3F8S1UjfeDPzF1wE2rqtZ9PtPOwWU75ykKs5sXMWw80cfhJc9X2nxqCQR6aLr6TxrEZtQoNY4DlTsDu8RlC1NgbVzXcdcHR96by/02tUVLtczCJsaV9khM76xhM0tYXuksvplZ8hMvTPXkdDpOAm9XuY7T+Z5QNsF/bg/abS0M7Ly/E78I+t8n97pUKK8Lq/dKT18ncFJiL5Kve75l8voYSOy+5+G5dCsgZIH6QnHvv9/xvLeCMpAXeB8evNe/qvdP5oIcjbv+CvOPSnOGnRo6aeB31eLQm0rj4n2wM47L6ydF79ehHxZp2bem6L+/Qq1ToRJcn3fvou9eaXke/o8hXZI9buiMVQn24IiMOX9k9NSXxFHfJ9fv5PX5ETNd8IeCIl7v+CYg/Tf7e7/goPNE/3vf
+qPqvf691lPu+qRjtk9pL+1i3lqCMr3peir3BPdje5l7dQj5BCdv3E3ve7utHrpzXf2dgn7ve8ffqv+vS5qsZP1NyvJu+X//r7KgSb3fsQSEPBNlY6cerffpir3vaqr6vpfXp6p9K0aCi00KyW1NKhqYuKh3T9bPu9viMI+F7DNoyYicmvMSDsUyO/nE4akUuUIcb0y273C/w6t6SLt3fV9UYr3elzSc7ar6k3qb04gl72n6whu7lObzw95cjLjJQWEi/v40gfcmY9wK1L2Vmq9q2EXPz1PB2v96jPckIms+Oe8ndzHFcvk9JbSicSJt9ymzL7lbqVQhys7yDuqdEMW95fXkXJ+vl4sj3n/1IL3e1kr89/1dPv1gpEXGT9bYMKERvDrwE9NSHx9F5VSyCt5fd4V8QSXeb305RJ/Eve+Vl0tkqnUnuou7fW4TLPfe9dERjXfWnRL3/ZaQ3bhZ+4g2rW5dtfNy5SSpfEfzCT+fs+Rgm3eHHCq6Z62VZKwzkkED+U9va3K5dfD3xcAAASXQZvgFfBZ4sUNRHullTl/1cxOMzFauX5N+WWDjX/5ePfwwvcEBIcdIEm9LwUQbtHuK/JcrcC4DdfqS3f40uM+vBiRPrb54INy8uzrdG2/astOMfozBaafK/tsrLGSYzcolbtZeH9cwCi2kB93MXcn7WnlRjOPQG0ck+3Vy3BGd31UTHRYZ3V0FJ+Tv8VrXV5f8qotopuO4l/+y8Y7CngkEUiBtpoyb/GeEvBLA6yxuuwxPdZ4+Al80buGBJecPgRxJRvDXy089csutfiPEv+9hDl9+qDnjFpK732d5BL+NtzDbmsrc4Z3B7eXjYg/1NOT/69xBfG7mVbKo1TlgkvCRh6/SrRYSn6BYegxK2//LJj9PVZeT9PLy4V8YaYSd5nBFvkQ9NBpBNDMN8g6qRcNM++4KyF0+7sknlxK2kwZckL0nftZOXzi+Y1+MOdlKaVdmDpCve3h0p7rsrCWaR7xumsDdlddG7vyxRA9Lz8q+UNlTv319CS7u92QS8E5A4899scuthfpiLiT8MV/3DiIwi6O42yM3AQ+rmfS9yjrXsBMJOaVGUyt18CXrhsGO8kYstF/GyUylJFbjbMhDjd3xgl3IoQj8ru3uytGwjwM3cddhG17yUO242UL8vXYmCmjdGUu7A8E9i/BB5Ye9q/8PwVYLctAjWf+SOgpYQfgmnm+/HohrSZ41D833+GH8E+AgedX3pZ/J+16njJkDhyTz0Csh1zPszL7e0pAJvSTm5qrSGwAmvuV/1tQzwSDtwlxNjk9fuGhd767/e/QIt2ucrm3eZSNmBk39tnzK268WJwmWbvNsJdAiEO/r8EJtgviouIawpuHyO1TZLGIWZTg/vDjRg0QezNiv+4Z0BuNZdGAu1n2Hx2k6q/TQ/MeZ+Y84cks/1699tZnrNbCeYNyLyxNieKR9gjgdcI3V+sWC3gp8lAkPK9+oS5QR3t4sv/4JDDKd+XZ2yu79tLLvMSXH+Ziy5/pnr/BPMLTi7vd3O217WkV7cXyEAXf9L3j/4Iyve1eS5fhBdapPwSGHqc7eWCEr3du/oy7b6Mvb3k61Wr3eqOile9LJUT533uEPJvMz8aR3tNjPp2WbsZ0BM7qrsv+UJ93d3P6fwSFNLxd/ikO3e7819L5b7+la6Pyeqr+buf6zXvtVd93v8Ftu/d2hHw/5fL7pzPpvrND0UQS93vtMqyndIrHt+/utNZvbVLS3u+2xqUpihSa/Myicdgen/CPiry+ewTPp6p5aGumyir3v2gXHrXd5ZPaojMRk+22JGfX6fChCXafbuORb7236nFtr9Lk7T3pXiSBElyJtSmnIvfvyWUHH+hQlpDSDc5ON62lPTROT8Kf6kUJnHK/v4sKGTrnzzfem29GKgiJ3cZnPu/shGXBfi+PT5UQTmvd2SzPfeOuby9rkJ8jGCBvBIpFwwk2+HecC1xeCB84tUiQsoYGnseu39T0K
e6zIr+lSl9ifWUTeqfqQllfpwr4ilfJzw0vzELH6VHTreXl+2iaBEV9RY3tFJe9dKa7/Jhf1In5KhS2f7qOduS/iLK6V0qL6r8PfGwAAATqQZoAFfBZ4sVHNlxuMqYlk1l9d8xNsxsMl/9wiTnfKFnWlkNOHW5Mf3HFD49iDWZZsDWDMXjBe+5YTYvJ/b7nh7a+c/jKTEDUZaGl/UslPk92xPvFG5+7S6GQXpJJ757ro4rHqEK35cXl2vaBZuXN9V70r+/LYV8xrl5q0ul/t3G5UM6QC4vaHl3b4UcvMOguQCO7ZWW0STLpq+/R6XRlK35k/ImPFpQQ0UL0/aRW4zbzdFpJu97CPhsxunAz5efTRXjuGr5qCPc4PRTrdyt/hK7jLvlnYFpXycu3S+S+YNSe1Qll77SdIed2OXu/z87EntJFR7agol707l6cilNuJ4q+9V/LehtBMv77Y8Vw7sxWW7cS0WwgV3VWWMILXo7QKeKRJOCpQvC773adtL4mXKC/osYctfzgwNTj2sfZav/J6S5eRE576U3knXuL4R/LU33u8b1dfX1gggj6O/24TC6JmA9BBrKDdNXVSY2N919JJ2U8dyB1n220oy80efWmfmZvgrkBkt7ye6rrie93nkyCXgiNdrj1vjrnx7t3OXROryeq6T4IJrMFkmCoMXmItbXglGuVDeiqqvdbv8x7tiExq/+EZwfDK/LgkNnS24b45BvfjCfp37gp4QccMMfMavnAvpNH36//BH2bv39/kr2l8fwRmYt2vRqWsbQjoYI9ibXkajbMnKow5LmQpSo4PdFbr5Dfz66+8s9uda+a94R8GGd8j4t/h+WCpJ48oK+nWthNzhc/vhybNINti240lzEsggomzvTpoIkm7DazB4fFlT3dNK7q/Zw//EPrJ/3h8txK/vV7CdmvFGbQUcGoWAzjPO8PXtfDbdv8Fu972YdbT+ECOWq8+9yXS3hE7vjz10amJ+lxvV7go52IbvfXvhr3CeO07v6OgpduGm3FZ60qXLXsrG1xo8oIrK2zBtY6TGzMeluFPD17Bqu69VZizqv0Bptq/r39uUHB3dWMWenzd/iuZAh5Xwi+8cbYz+tXfzu5PXjvEzHciA/92KIml9aTXxhpEfv7cIFMM93zj90w7ezp7EIFPGwiOz2icV7uPhHH5EbxF93fX+C7Bz089zCWzbtjIJbB3mPThsoXYH2O1BHulZpLbFXd93yfbZGJXYg3h1ClZltBd3S7Pfv1Rb731IsoQ8NZV9byw/0Ck137buXHLf98s4KC3dXvfaqKILKeFO736lJO+63ZRv37TEvRJd6+GtntHh80EJi/KEX6PE3fKrK76giK93OBHrJ3cJP3KIfbvSIQIlEP3ty4eHnj2gQ33Sgt17rEX3e+mnSX51ii3u929b7vrFXfSf+/M21P3e7y/30Xd/ZBVN7u7l4R8EhFl2/2CTblf3otL9f36v2frLS9QSEe5B7sv/0UTd37QT3u9N5f5VIYt3wm9tQRmffpb9adVr8mX1XI16E/ECQ+i4Hb3y8P7Y8oLDXvjaTx1e+uRkBGJuK5J17ZXuPFy0onl5cqv1Bdd97xbfYUNOljInhuKQUOT2ctajjp4NL/hPx9Jlsvrqo7edzvuSPf7EvP4Tfs4kQ5sgKtFz5kjdvFnyi3u+sxXvtoSyym5JaxN3vlzJ+vJkXTRTu/q0Y4NLQjDCUs7JzV1iOCMu5CJQW0tI3d/knhdwxX5l5Ii1eP49FL/kkkp5KckRPoezr2Sf0Iby7grgAABHxBmiAV8FnixUPM92knGknNYC9OUnHffNpGuoM+Yl1DsKZL/7hHpHGJBV0t/IjXK5juvwxF2pf37BAUw+8sh4Oi/1zl6J3d4JRtST/+FOcXL397bTcAN9x3iPtXtGuvcPk8uVsPZ8jPa3CWT+7TMZLw92PpT9llCt+W76y+f/l9IRc8J3fPB2iA35eSEKr3GiuEnGEAxVilLcz06zkt9
Z4KzJbEVYjvkfPsNZ0XrZcGStf7hAu9OA8R9vPPZAcQfDKHA3AxBFzdw/rETbbA6esEn97rf+YsGYr9+gvjlf+myssJcZq3DwYlcPVdPGI71X9njL3eRfe/c6/xxV0lZeSa16gl6pZHhuLHTv1CfmNxXf43w/BWguOC3UTe0EgqELhY/NhbMu8fuxYvP1s+E56R/CBiiO658I19pyPMPgn0r3/8eeXO8gbSu41dk+3/yyuoYupmye3W715JeV/Tid7u91RUqVV7hGZYpWxcEA2rchDh+opkpy8l5BjX/9u1b4Tn/zZW+8JVrztcJeCc0+547wZfu5Cxk/NLC6Xy24rOdlp3bv3CJE7Gfh1pyFZ6J02vwY1pvPorBYXhyK8eMw29w7YOBGYiJ1VqeJo54u4NFIhj2br4b6+xrBLtMGfnH0u/e5vs++ifX2v1gn7nDsIvafL9r4RtDB3BRD8FnXfcV3e/aCkdFylEoTm760vowJahN6Zo6oCFcwXrT7/bc6wQ2Ovtl/y+EfW2YLM0IbT8ol8nd83P43reBnIAF7/+C4hQjWH8n0w4RX3f+PV40oUJhl7AME6aV/eFzJUkjiJcvXFuQXn41VGH9zqNu/dihtdtAm7nl3cEslbYVtjd1gkO6CKp8NPikQ22Sdf62vTQmWCwodkb+8gm7/73C8GY7398EOX5L/rLvUTKvAc2Yo3hzsQURXZf2r0syCzz3C55PJCRcnz5SCPmEE8n37aL/0gVZX5G0SaB0qozQe5fgk3r/9Xqj8n9e56v7r/L4j161wkQxuV+cL3vpvsSq9j3pwh4IwhTvSj8FJ5m0xXvNr7fkzU/gku/lS9yFvRUuoITFPP4uxbRcQS7wRE3dsvru0Xe9b4TPu7u+7/opAnm+9/3efwh4QEXcr5lqR4/yxhXvfd3L73Mvf64dHiSu+Tb6S76xF77v735oJPm4V+vxW95f9P8m9+mJ7u94R8FZLy+O0uZrvX21y4Jcv3/fXtFjdXb6vqil5/sT79arRfJJy8n7aVDagjEBuf70vuuxsJC3cvu7vsiNenveoR8E4g/l+N3Lf+tRN9u29VfVVQn+hZ7tPoiJvfS/lMUOz/S0TCT9MaVy5Scz42cY7OfxLCn+dWjN78oLTO980nb7MJmj6/J6/J70dMO13eX0mFBTvgh+fL5k4t3LzkvvcdvkCBUOfuUL3zS4T8mlNvIQF2e1lukeyJCO0rv4mYfyesld3/jJrv7S3Ri5DPDrZLtaIXX8LaiiZpyX7+/wUCX3eidIpPq29/Xk6d3Ld325ERlpP8jhf1In4i2lUupZ6AvgAAABNhBmkAV8F/mHXGob+bpNBjwiTd5SpenXqnjHT6qQN0V7gpLcwu4ccaZNDWl5C7TRb9MFfjee/TOnd8PwBFhJnyeSb17gnJq8JmHmbFKVbE1Tqq9lXf4vx3ifl2X92lhXzGmt6pf3tsbPOXjIEAhcF0GC7xGGmX0hDeZ/RK8W3b9RgHwKfUhbDtjJv2PU/crgt+sZ+BvnMdJgJtqPDnCd622hm4zdfbOJ/6N6CmqeX9Ydf5YhHhz375r0x9b16UdZ3qXa0MX7CJ/daeuvfcYV1soIhTKGroqaE+8DOQ6LNV4lfJ92vbjbgrnsGenvQTbFUf65o/0N8j9/6xt3a05g/BN4+EP6MQmLE9Ps92vp8kFN5i/LhrGFO/yxnuixHfkmlL0r1VqE97ro7UpEzbscKF/vbCIp3cdpKptluxW972N9srEgDWXHd/uHBKa07KPD1d1Lsfv8i9/YINLRnOHzFbbovKc9uMov37Q8rGva9usoUIPj5o19sCt9iWEr3hPp/+c96ccEjS+nE3OVfe+q8XfctbL6cpd1pJpwT3d3pSkkqk+vfxYjKGg3IxA3ZLkST/grXcffd3d7tb6sTu/LCE1viCCstHu3Kx9wwQkA96rMOhuvDcs9i36LBIVmcaDE3yPpOaXSEyj2ASfnOf0yBu4A9HEV/guLKude
jpU4Ne4vlHQm4+ud93uCeUBOUkQNx2m9xJ6VHTrQ4msMtF1HnOFoZe3TRVgpp8+SyGA43n334IaORz7/lEtpvCNYIzG+9OKrBVGwRG4EZ0e5Suf8fTgsIGqk092J7I+yt8APbGYUt+RWjt8PBk/SV/BNtS1Pzn/VunDNjNH2PpxZcExRrf73jHdCiLkgR0KfLpxLBQdw4lUfyzphtKlJ3JK/aC8Els+DV94EP8dnXu2P64fyelvrY3udIOileF3bvuk8y4JlVHcd6Y+Cr7SfEkNMjz6Ge+SoqxqDkpJ51Wt1EHfef3CPixE/t6b/BQXdp2N7assIlBOQ60dpORf7pwUCePRPjhdI700jfq2utXrwUFe+967F2bPMoM015flr0I9grn+7T7t1ub3LGXb3ve7b3/ECWnd331mI7+y+iMpX319YJeWk+bZga1f9Gf8uHmOX+og7vc//5cve+sI8wgQVjfcS/7Qword3ckn3d3NJK3uyz1fxC61fyenCJVfdru+lzaWu+7qxsdR3u7uf/RSE3uEvBhl+P0z3+vFf+Q+6KsTSTeivVUat/UVl/d+l6+t8N9HepevVCO7oJCbu9729PqqwlcvvlyEfC8Ousl7TK/XSou5Q8sE5J3Bi2ky+l/kMIda/CAl2zwfG7cVvFG+xcSeifP1um71QW7et829ZJ6WHpsTUJSL5j73K3ZPv5L3fVZYQlbvveaR/20S0Cndma8+QY8su00XA/ry6vnsn1VEzbu7uEvGEG7lcIfMTmcJbE22/Hbn+9skERpfd8n3n/lEhkaH0LHd4y7K8hVlzetUg2n/fv39MVlx6V76oeZy/Ee99N9Nx8VksZJjcnpekaone3b/9OFCf2bfszv0p8rFjWt9V96kFp8Z/VOuSKvHX9Fd9OJRoI5efHs6Qt5MXR14gmb3P+n2wRHk+nHalvfs3ySX3l/JJ/2ULTH/wt6MdJMllj2nvyb+HvioAAAExEGaYBXwV+LHbkPjXlTRxdwiXhJrysBne4lGCj+YxSWHbl15ev4Y8X40ZTg+0Zwr3COUVAkeliXj+AQaeDM1VT5bF2LvNNJwdlwPaUfcOxr/Uvll5iLzf9z39wTzt3wXYCpKWy2stRlM46/inXXnDsbWEXBswthC/gxHUEfp55cEo9VTquR8vvrX5S5zbhVe4SGAi3Ja3qYEzu07ekPnOxwidiQcar75vgw7h6/u44J7m9JW/+4ibhL/Mb6IOdxSvUSW7op0Idh3f3dVJD/cuHoN1+4is6+dR1vRi7vW6l8/Xl5cD2gJeYlKM5fGE4BH+7IY373CfeuoR/0Mz2elfMf/xsIXvj4GU9er2McEdlrlmpGImfNj+jJhKw7F82YL/X1Tdnf2EuQHkv6oob+CgoT+TSJft4fGaXyp0WJmOHm/5WNfYvjX49yOvfCV7uM7/V4uEymZe7yFCsSe7nudJgk6QD3ZKUo00e4sR2QlAI9y3HfgtfqmlF/u9E7dqVY7t+T8z4S8E8q9joyN/VMWJvtsZDagEQ9uu9+rJ4x9T3ma3+V7mTm7GbL5fV/Gmp9pFC2bpTbuTtAB166np8alBMXEkNSnPF+jCF/U9akitxpV5KaOd3O4Ay6xyv8CLuMzPtKr9M4cbr+v3Bq/vzqtjScS3rpmpoVV4mrabcgMPehNFxfyO3S5YJe0dw60cRXlT8N7mCLwyo7AeP/dnOFiXrJBUbctWKCsfe6dAGH4NJ4tDd2qULYGHOKXBs3Vko+K32h3xH9Pt62ZaY98n2qZ64eI0Nqc7VtEYD5Q0/By0f9Or69owt7wm1K8KCB2n51SfKvbvhhTXbrBAabpqtxTHRfx8UvxDuAmf/q7H1/qFCq+xG9nyf2J3u0vnWYveeg5y5hj6ufmnUBi/3cHOoIykPZ1r4zRi7Fw15c7jK/17+4oieTz98JWOPW04I4CFVZ6/sGZb5T7DbPdOCEuVh5b9EhDyeXov3SRaxbovBEZu6uyfqZd+9PWuxcER
ZC06V+j3nXHkTk9tsbd/ola3eLQRK3ve7v1a2IEPna30SdLt6q+kRehFb4Tu7u2Rmnf4JS7vl3B2NL3+VdWUr76sIm5td97d+T110CO5Xbm/UEZ03dztVZLqnCL7sWI5/u95CYQ6R/e8/e6UmwUF3e7/qrBJd/uvqlrr61fr6SX+/6d7UJVir3bvPiaXXWjnBtcyy+r/rvHXamChfgXlnr7LNd37ogh990U5aQuxeT1TOvzd2V1JITL79KEPC8OrWceN3Nr8VyKPQSM/fhl7l+VycIHtvjd1dkr+mU73LW8qT9okud+cup61/W6gi6popVKZlQm7vuZvzP7KYSgUfmx2Ad65cQ+fhLxtGk2ZFbSl5VwSd/vavF1vICHe729f+C00vdvsnBmo3449zcl8/907Ky3PhtwLAv8Micv6vL//BObTdO9OOxd7v+qYbrc3kzrtI9IIiAD/+ZpwftQ+o88z/eB779YU9y767CR7vef9N9/fk+ncqJ1SDV4nk/WVdQjPFDu77dXvyf3avCzrtiNV+W93t8VCQk/736+iTFve1cQjctTD1emEiFdy5lh5LOZUsUMYiQQpN5fETfEdEyUdW4deQpMhYLIAAAEm0GagBXwWeLFblxLfuUy6QbL/7i+HoEYi9m5uM4wO6aCa5fy3TGXSsJgi0KRcuG9nR2KYa/ft+8v73gq5yJ4+8OxdcMbgUnNbvPKTeVBvBEUl86OpP6cvLBHfcqa9VR3L/l4u774rCvhwkwUc34a+X+Obl/e2xsqAJO2TKwRgomXCRe1Tq5vej9wF1c7KXhy4Bdzm0SWuHjcH7y7EGv4txpwUFDc3Hyekk3W4f5hZyh+trELv4QatbufYk9tWe4NB+3/gSbadw+o30nu2V9uN9DPwL6UfJhSvrSl95go9r4Za8/3TRYnnDb47YNW6S3eQefpL1Sq1ej3/dU66bEnalhHyD37sj31+bkj+U9zLsXCb9wiMCoD48ZYxLQiHadb5z3/GRP/ktl+h0b/22B+oZRwbpacPx5PHLAnXVnOQsyAC8rbShy+v1TvegR9itpaQW0ugVlhiX5asbDPa273eVNLkiITfbsFveQGckIzHjx8NOt85Zv5qFMgIP1p4LS3m2X6dS/kl4Tuf/LMdFx+CARMnP9qwdkpjYlI81PRP+5lmuv3ChXoHZYRcnZf7dgW7VKpfk2pwW87dwTdzHnoTf4gVMv3blBPcb7/znzQRAay4h8A0e6jHHbEZqSaDj3Fk/PqOGSa/L/QUn/MJ43X4aUxF4AraoTty1+CbwRfVuGFtIwSYt2t8aXHiu84xDbKfelu6m1L/R4bu4duigExG//97gih3bNpJ/bNOn4UuBzgJgpyfMO7WgfScVXdCeteBRsGPwOPwoZsYmPw3501zOiRFcgW7udnSvCfo+HRWCsnu1P7xkuisdZPtuxb3CJCe7p782WQ33+ZtQhd/f9YuYZTs9u+CkqZIz25ff5q9BlFgQ/LnnPm77HOm17p+sWUMy+TAc8bevlDRzqdFZZJpNX1mJuBAuTak9JpLPwTeO1eBDvG9/ZRbIXuyn+tXv+Ezu+OmfahHxYiVhvar+CQsBJv15q//qixbBOThP2+bI7aa8FYt+4fv+9mH35eRO3FCA7e093v5O9J8nr7rr3qveSjnF25BEvR9GaOVPaL5f8E5L3unqEfD5Hv5fU7OpIV/8Fh93P7338ZPS/yyFng76btEwye6kLXDLG3falTyW6HP3+3ra0Uva3wSZ/PFwEfGGdvff74/S19p/4KSm0/unu7Wwrz1rv8hKo9a6/dW1ZIua++8El93rzFRJy+X/yS3fhHwuR4dVm/z+/1zwVOW51ymPvyft+XmLd6rCXkmTb1eid4r24KybvAdVy1s25zl+qikMJ3e9PhPb3RZN1y5OT0u/k9YS8V4Qdv3lb+FNPTvL7ve8gvxh7klb6ZO0y2XHf1SvRP013GpFRqlT+rvf1/DwgxXKDQN04bQqa9+
XXWI5ALCXx5awzt/++nCHXoODxXd+tizoLwmX9arfUoIh131vMYyCAnP8/8V7s5f7FdoaMdU+oQvu++71W+k3zFdv+ESYEtWB/1j+f/dZPVS9IkTvc8KXl9Ql93fCvkx+zi+EDOj8u6r1pZuXMvqkuxO76+vdkshO70l0Cndz0cvux3S7nv2cqzsfgR4AAABINBmq9KQCvgsXuLFBB57Hj13OWeL5TTUw15smkIUq7fwQUNIeimLgiCAUvQSwyD86JhHn5uNC2TBHaXmdcbiUm1/Xu/UMh77hDP7vRO4bi//0JgrmHjjZcpKxeeNqfojb60udy2T9X7UJd3xM9o/P5fei8EdvVIvmlkeHXuXu9fZb7QU8OEpSoii8b38v7vjfLwnTzVkGdIvUO9RISPPeJH8pmqbpdra3xr6y2mvgHAwGfeuv/G3TANLUBrYOLXp2pLO2v1Gaf5e0NNYcs/qz+z+vcO7uATi+4a9lwwoODX5EPQ90f+T3ztuqFdyLyL/oYXluHrEqg2A26UuFHgytvyHqV1Bd3u5Sfh90eiQH2tFjKU6K3aacsHuy5KDu4rEw3nf7pToT8/ej5PSoVJLPF33x5P/lLUvwmX/3GDLzU53uaPab5OqErXt+Le4pL/7jO/U2SgQJzO85tiRL0G6SJLOga1/sei43jq03bOvEQAvNduxDdtT9nYKZb941foNyF8gvwoU8EikbjXdwke2n4Zl9gRtva+9m7TmT1xL+wTYcStfoaI5xMNe0ErLfudRSvl7vtQkW7Pe3b7hPPX4eh3ezwwI5OMwR3J/oR6n5UA7eGdu+vsIleCnhy5+jzYRak4sY8vBL7N6lvBGuJSwntjBj3d+zdz+7n97w3Rh7Fa4TufjhkyH7wSSjkzQjcWDl0LYn4mPwxeX6VzIERTlMv7qniV29QS07XHD0qr94LLTKbI7bBH7/vkF+EfHhkN7jTYclV7l+R1EYq1uUa/T6tsg07L9F/y+E23uCEpuvpp/0luCAxl+OMhwTxDC+r2lkWh6utf04KygTf7n2Irb2kL0XBfaREv9Hq/jdtLiki5ZPSSf3IYg/daXFezwSHLMyL7VZYIoLs5/36GxRDOxwYcgY/7QEK5SPllDIN4PXh2Ynp9VCPvdva5aK7pQRELqUtXaoZsSJjIlc4/c60nqvl9WeY3D3fbyX33ghnf/ZPbb0usn7iq6kJvvHQd/9lJ9ovQk/wUZYvvd7mvLZ7v2ld6NB39/mgtu4ZRd9vugLT+u7XtvWva1eEOwT20isVG3t3Zf/K1bXfXgjLe8X2Td+hNa8lWye7tVarM3tYJ+7w1Eaxu1rgi07xtUotKynySCt73fL5PfCHgwy+ZluNoI1fJD/ZDPfywRldid708mhRX33fWSt+tUvebu+j+s3d1ZUUhiUMXG/v5CX3CPgnoEiefysQl6TKfygrub96b3e9OjO8nquEP9fS9iO2snvpuVS69EZ3vCT8pRsBF+dpv2QhE7g7epx3+w6LiLTh56S3l/j+ykpyS5EEBLz+Kz97rfthPxwS33k/rd9i+XpJ53k9KlxGtp7kEO/dCiDUL8/z119AqNW6QI/C3h1EV1Ko0GIc3lnG9cJX3Sd4UyXk1/kmI3vW02UXcv6Wc635JiXfe5PfpPxZ217r+Qzyyy/3ZPskLasmetaiRO2txnX0J9+uyS7rl9NaUJeMrG5f2Szj2G9j8M5JBB8p14go3APqnk+HvioAAAASiQZrAFfBZ4sVu9J3+Y3HphL6F4/Vt+Pf5S6CCfjboML3CBuPcCCwZTV8UH57zhqX9tOhxY7wy0OItNFJOCna8gkiyQItp3C8+XL4Bfd8oqL5bw9Sf9woTLS8T8hVXYwlSvtCgxBr3BDd8G7ZfvbxRXe1ecfrywVe77uc/E7o/8J+EeXcI72wK+CQk0Q9FnPDJhZf3fBAQVhwdQCb3nYRre6Rspp/4BtWBMl3ZaBbwolG+jZG/vsqHcEj68MGtz/zvUxn+4
T7NODAWVkvlSaWfVZWMKYegg8dvheYOj6Z2c0pMMdNDsf3FQxc1/M/b7giufZuikv1BPiYn5/O2T7VSo/BgUMY993vLc1X+rdsWTDvR3PvfWWcVf+U93QYS8wqyl8v/2NyPMlRrNrScdlWiKvO2r9ehtUEBTPj2nXfMlmfX5f76CBLKqyFdXm9jtw/a2skpDfExoSvrCPM+9k7zAsw98WUM218td3pflWuCCyBC8rzfV2HYcpAJT4S0XHQLj0tYe8P+6npalThG93+E32py/e+sTcve1qPImxNsJeQ133+CqfokmT92O4RzK6zHftNCWeCgkDq2tns473NplYtjT0ZA60Q2hh2wjvgNKf8MvbgS2VlVkF8zJMPNmtt7h32IA33zcM3wl57CSVeVNt6lm/+8EGYFomDz73d1Nun9X0CvH2veQZPe75imkJyfgq87QdS51+4cve9ttC04UvDydD/jA5Mks1uEGo8Fj2WRLZmNYxY28cQSwXiyz9lBk+lot8aRr76JNjXiQaF/e8geHy5HAT/eabgi36/9YIaRPjfbXyCxbl9z944R8EgyrfqrCcrMFQ3l+T0r3JEwkSaGvciYOwxnp/bvlhf9NZEj5snvTu5aN2T9J/0d6Tvq5rFPFVwt/JHgO8kLaF3BVvy+fzj8xnP0699ii5n7pwj5hC13XaK21pwREHc+znXdFgoFhqK0/uHF//T5PaWu66wS43V9E9MupTO79YIs6u9XbkJZH3o8XZEa8/+SEawVmI2ViX/bJ5b8qBaUm38u+Ndy9awdFTHn8Eb8Hr7E9e6Gddtgm6Tw5F0Ox0rvvt17b01CL9wVEl/e99/PwV3u973u/Y1o9dG+Zm58a3YR3d7T3n/pk0ryeqR/qrCT9f813ek1nrG3RtBDd3u/K3xHatZL3hHxXP72/IkTsn1Te2onN92T+lr3ye+rqSPK97vn/6kK73106pLSSthEQ73hv3fdTh5JJsX+Xa3iS3vd4R8E95Il2HVxn0+vyiOf73wRnll+241kfRdvp+x8SJe97+3u+90Mn7rsIkd3eWd73l9uIT2W8mhLwpfKcafBGqWP8JeWHq1/igO/iTaKP8N28SfWUmnIJEt53vulrNfftBsXjvZ7k7L9Fijbt3v1N5f8t36fd9FQIBAykPo0i4Fu/ec6w1+X2XofuMO/u7uf4T8lo8dZEKIXbmzHF78kSLk7WQ5t7E9vvpT1tJakI7vk9UkvmKW79U4y+fhYvvrglES4/nylVPti5t8z9v/a5JheKy5J67akkghJLVdOvUMZLyXovkjfvScb3xEhQ5o938PfFwAABJtBmuAV8FnixWcXdKyL/mNLY2ve+bidmwM+bZR5fjfD0t+4TdbEP9qbtJFhqdAQ5nwuc8oZO6pze6raYfTD/Z8v7veu3HyE1nyA3uXyrpXyykANpLHv98/eq3LeRPVOrO+Ta2lCd39Vr6F+PUvHl/2yYU8EhtxvIote43O6OkEF5/Rssy2tLZyxaTVUlXf4VHjVcBCf712mFmJ3cl99TQM2v/9/4T+rRUZwR6MLoD3TZkHXYO1vY3nkSyzuuPvOJPhOXWs83d+T/OUjr0g9Xp16J/po7LG/CfTX5NOJ7j9WEnAEuz9/6EesX/9C7+VbD7ak0kfX8aWE37z1+Mg5W9PjM9KPya/auoWr8fcPQbr+ppK/0k7hWUflQPCM4X/9Ljuv+T9Sy/CFovfnjVl3W4vjvPPO795fX3E33LHHz+qvxZdWsuQmX/3FDsSTbiXH5f3exunx07EexMfCPYyN7ptcweTpIODWBF+LXsLqg93sr/oel1/+lrF6cZLbyXk9JJpvzd31Qgrgm9P6/lrpy+a6rXJ6rZNZS7uvBYY5UoalWypnrAW/DX6CU8M5Af6v754wn1TTQnff+O+rlsXKGoddPd8Jv8FRI7lyJz+i2Wxt0DtM3+9OvxpH6v4fRRhEYTWlErcN7w61gm+ha4CT9Ue5+vw8d9bhEdEgc
Wq7JXzyMG35T08rQQe08OlzLSJ/6E6SssLYPGwJ3+nG6oPviTeNviO5WjQl+UtW+ELafqO86wFX3JaRDo/YmQ5G/fuizwiSNoPe7jbo/xtwezjwim1LiL14Ba0wOe+C3iTyVppb/ha4dz0f1rk268KG2RHz/vT5vYOUKfp9x8PVI53fd3vUf7/hLwQ7Um2KorDmOlFyXZ9//BAIw1+OqeVqx8SurxiB0RQ7/1+8s9jWJ2ep/95wPoT6+sEZ7kDe/X2JS9+KuqKsqfwywrdbgjzBtcPRyfV4ISz//CJf9cohaofwSFcZ7v/p7cpR+n6xX+utkl6/NNsn61rrRe7GoeZ37kUoVveX/9Fy9+ime9wltgmu73u9zsawQll/Xv5v3VP1oj9e7Fbta5N7hHwTS+M0o6jJww6BR2/wpyY9y+58+kft5F5av7kLl7p++T0tb/rNu/X9O7783l4R8Vbt3lY6lJef2XK8/7otxR7vdyS+oemq77nzeTKVemq/jZ6ammwdvPkyJxvblZfNH+4Qve+7363zOILayEvft/mIQcNgJx/5O3Y+ECu9w7lsZx+vvo3K5d+gRFcuvjt5IQl/0zrz/CPhcwacm/D+XzJsPjqXn4sRj9N2/2Cg7J/ELG9KraIeldOu3u/XL4ukBN1L/GcSsbP3t/uXyRBhIv0/j6B0efulb5fvxBARkq52XMn1WWuEBOf3u/LH3WWIPesuLHr8j2u+0nfdZUFxRYBleQ93Dr259S/6RJ/0wpqGdR/Gvp419L2xx88LvefOqi2vI+xaZHX35Pprr6XkiCjuPq8rvX8K+Qk37ytSEfSvSv0iCbsu1fnzQj+Kl/3Jsv5JOpmRBnyUhHDM/JEQx86b7lEH3t/EdkQlZL35LtV/JGF+CuAAAARjQZsAFfBX5h2MSu/cIl55bvMSdhz5TSkp6fN3KMhnzeEPB6aX/3CPDnmAkiZy+hlGuxo7dL3CkPsSvwnQbmPJsQch1vbkL3CF3yqPgl+PWP9fmJdIQqNieq8pwo2P1WTC3gkJkmUJpcv7vhEgrPHAjcbm+KwTfM+CD+UoQzP33s8dLHPkrh5f73CExcry5eUx8FHVf2R4f+HpTfsq/EalXAh286/f9fpT8OlzmiqXG5NhMI+P2f5yQ7U1PX/4Xgu5Wv5lO5aXnM/1TnidJO7/ySaZm6TnSRbu5B+X39/uEvPS/4R7C/TG5eawJDfpzbjdz1edhwngPqv+6oEFl1k07WfXAbEFus+pGxR67im7lgbedNvpT4yVlVHGHk847eu2hppjtPgt+gfZLA95d0f0stzfdnsqWr1WIlfYEWvnxVNMn6ajh6H9hLjzo+mjs8adhU5NNFjQcdIgTccE3vrrdwH/+o0fH9H7y9EWfuNAy7Q1rUweSL6pPBbIzLYEf8V0DGLlMg+SVFLSXlvg7ZPTcsW8Ugtc303//PxeuHuly2Or/TYqCJ8q1u3460OGZfI+nDF93OXLmWrcPcn/DBc7kWWbo1X7Z86bE2ghNph673d8EHy1KciEMaam9w5C4uw67m29Z/j1cShiRSIBHrpHva3LSKjrh7m3ZmP1W0J7yAfdPOSbde0C+7u7uAKL1WreKww4r+E9sFIp7d0zhv+7uX9unwQE8fYw36ji5ecgovgj0j3yfpEX4eO6i3217AjRdAZurfQSC3rH4Y3v6Xwp7cz8B/uYfgheXUxRYpPTd4OqBDfcadu3kWC7bflAqf1+CKO1WKZ2x+Crjx+dBhSrcRLRoQwdS/2n6bBaQA71oxL7zX9qW4Yf3CL/Rbwj4I9O/ykluCLe8vcUaFx1MrTxck8Nj1eLb13OqRRMol76aH6IIuumr6sakdsn7bXr1QskZnQm72vvKTMU18gknrCPlES+mqxWOXcvfuwUHCHxx/NeXXpJ72mpZuXdtF6915PTa7/vFEc0i/Ke3tOzkRX6LhLwmIe+r5PqtefpwRjX3rv8RZ8Iv6+lr6+/stCn7fxVXqtXe4T3d
93CPYIiPe/lQJru93d36Xp/oEZ7v7quteye3fYrav3r/3X+/oEZHe75PpP3EkRe617a7S9CPmlYlYrLLe+T6+rb9/aV/JXvVXeq90W+76rWRWrjUi9ra4TfbgukZmRXv92JlO7b/XYsrKSe7r8Z0k2QhNp9Vy6SuQpitG2XCfj778Ee8N7bLvcZzPbG7/ewk4ry+/IileK+ilCBeHUlx2YTLl3J79qT9iff0vYnttaNd+T2l/JFGdxDmNC0Nl2JydidTaqmXX39MaEfz/hTTBIEi/9k9a6tRI3PLe6vITn+nJ3eT6Tm8/J7b/iPVqdN9kYkt3bLDfaUhLl7cNLC+oSM6Wr1tM3Zb0+T5ISKfZL5rSraosZMX7vZAREvdOq1DWGcRJySrJZXD/7gsgAABFRBmyAV8Ffix3LWG9N/lKs8my1oBeJ+UjwVapOgGS/+4LySD0bLiSVMJHBCYcdesf9xpQJ17ka7dntPSPuxUx5TJEfFeOj8O8BcjyskOM6/iJZcWkyzs1IrJo9W+LMYbyy4Zh0dfgj4x44WBvvLBMfPunIho1Un914nvLxNwo2LS97hXwSEzBsbyTZf7dsaTNFxLgdnqEte/ysZlD4PErKg/z7oDDnZrbrOWRwiZ/gzFA1o8NV/3G1bgwb3VVbw7NZu53440XXtWMXZ+NoP01njuoPOrCy4fvOM28udUZYKSvsblJWEkIdATUPGV7rKmqfNyA4vpJ8X5JnQGHvpySaZy8ntp19Pm2lpZL76yn3KSCb9MUOC7S/BNsn9sC8v122ECd35P0JQmcIkOyslocWo/uUrvn8VkFJfDbZWaVJiT68tfe+Xh9bXCXgspn4zj46r/0xYmX98sFU4uaOAm/b9u663fCNlFt/fFAy+l+GDFkdh09uAK9qtjL6l+w/cb38FRTRoMYDh1lZFzU9jBpvt25YOe4d9grgzHZb6OJSEPiq3ETcLDN4f9i2EiycI/I0m7v5hh8gdf20Ntgg6OXGOI8LvXP/x19kKTQcHZJ11PSVPP9k96G+jof3KHiPH0y/pEtS/G3paRdDN0uYqxghWkWENnPNcAhV3uDd6EsR7o7R3j9i086D5rAid16/HeV42guzu6RSuBH54U/9FX5RbK7hOisw6Ws/tJ8xMNRbz1i4SYZcvCWGmwMVk+l6cJibvtRs/k9enWyCNv6+kXuhNFrS5vZ0Ym5l7a3GZlo4J971OEbx5ElacmQSb05SveEi/++X8djiy929niiQjeZ77vdmR69pvet35fL69utda9k9a99HrL+v9/a7enhFcuMEZfN+Xtvz9fRS1vJ739/vfhHu+7yh990VZP7N3Frpe/2oRfs4Kd7v/d25e3tmuyf6rfWuU7v6PCeel3f0/Yn1eT9L6wREvf/ZZC3d91+Cfe971rdYQfpAnu4rd77N9o4q5+nd79sXe73vo5eq6svd9fWruvycnttif4MLvTBPsb3hp3nk13Ln6NLH3p/eijtk93WRVBF5WD0I+KIX4+Yt/zj0Iy2pqMOOZTmRcuvTrsr6ryenKJTfpK1eOoPeI+vkp0T6SJSHd2y+9iGEjyRC4DN57Ng5pITJt9N4R8hY6v9DSN51Jqm/cZ4cVLZYQu74ib/nEjcbPpwRvULJL3THcdsWa/cny7e/LDol5d5cc9+3K0V3/1BDe7pg65PWsViUXpd1spyr+mb/F3Rvapdao1VLREPEPu46XQ20dY6mEfa4go70rNKysa9vcKL93n5Igl7vfX0OFvPS96UV5fr1L3e91FXugs/7EwntvPFOT8nryetU91vXy+R8IcL9+SFu2lhpuWts//35Pev63qa7u736gspXw4LI/b6RdOF/BDNrWRPJd1+SImOn1DzwutakpagsgAAAEYUGbQBXwWeLFGCt0jSStKX/zymHH37K8vF6lJ3ml+buWIY82O+p8le4zzZnJFgEGbAzujx1rL3jcIe528qJ5B8dS+46E7rfbxnrFkip6q2u4nZ/54
Xtz8FJG0i7fI8MLlMX5QdlghtrwSj8JHk/y02JhG+au6TXevaCfn27/l0iSwp4KCc8MwhILpd/jCctAqMYeyC+Zi/gbs1gJW7AHAYv1EmjHO/2wRRllNET+W1L93tCbRRClcinsNe5gv9bphWSXNQ56nUqvFH0N4p/5PpN26KwoXMPbbT0P3UFL5pxfzj+MtF+FttN4TxbvLHywSTho6XSq2xfHcfE498Z7tn9pru/30m8hefkH15TzIE8Jv3Cg7n0V2W3nzfhbcNuSWX9t8FROy4gP8ma+7xvZ19+jxJSLnQPq3psn00XavrZYezf9OTd5KLFEUJmGVfAl7v8gSXH4zWFW00X8MUXf5aFLQwfCXgv3nEb37H0t/+4UIK3u93u93f2T+rVywQbuOj/4J/C5O0LYTZHjeCFe/2cDfmuT00s/cIzr8ht+WI2iSde5P0iP8TYk3aw0v/6SLO7Qw7yIgS/VvEfRESG/e8jQ+e2/73v8O8ZOT7b573bDy3l35Kwy2nY2QpLa9/4IiTqVaCkg/GSODiS1DLcyX/zSJHfFr7hrYDENr/1Dk4zZLF2i9jTekUrgNfxJDF+htp0Q7ByBoEG1rjPZLg9x6oxol+zp8/pYTfklpun1gu1Qiaubj7pkb6l3sn0mlueEzbmq9ezwVx2/rRfAYyA2tzZFDzF4Eb8WPd+CkW4aZ4uO29u99V4IxDT9zSvgjO5Q0/FqnLRH/Fbzlc7hmVdZY24tuRvy/nC6stduCHs7rMqfuzXlstyle+TBde8r9j1CPgrMfZvP+9un/BCXNlmqLFwT6XCtLW93rafgmvHXQRcnev/ab7C/PY+96xrv+i+n/BHu/9PrXsnu2Nv4qQtnSjsy7wt/BDfFkYaa/KuQRu9zpHwj5TW73Wos/Lj7fm5PSS/dYPIii3Pj+nrcEQh23v6r1F9L3Cc1eSz57GoEMuX9qs0EN73GpOkoRf48l6b3vfsaWuxB7u73ftmpW6sbBFI6SPXf3mz/bSy1rblkwj4qf3vei+X7iad73y+LBQ+l2Lrl3mver++t+xaBHKLbuNtbdcvl4R8Lw4kw2O8ZMdZp3/5RHP9+oKBJsVtXhQavD1+xJXfvfRUEjnxmns7692O7JP3vQjpLkKWSXuuhBQSeNeKnmY4273kA7hyHWcLpwl4TjJhuZ4Da7Aa/wRiOHu//ryyn3e6X6X3IJ2N7dFFfJ68nqn8r7S6C4hx2Hyks/Td+yUnQff4T56bv6cKF838hnvvlk7E9dSmE8nS/21k9NGufPSiu73ffXC2SMhCpnK/Oa7O0ab5h5HGJXpXy+XPJ3Xspdmrk9pRMi/X5io5ojcRWZiIi7uXHb39Qtk/oh0rJVIksVyFW019z4/Xw98XAIRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vAAABItBm2AV8FnmGTkIJf7X/NsGkNzi8Xxr2ScrLhkv/2Cw3G6EkaY00Y2OnfQisiKMv7tqEr78aayekkV1vr3FwETvl+eOH/mXk/SLcuizADRoujfKgiVy+9yj3u964JJhZOqOP4yUfyzx3e9f1cK+YmM5EH6s8v7vYICBd8lFwCJ7ev2GxWf3lmAZlHF3Oheo2Hdg08m2EfC6D305Vj/v8Z7DUmtdo3B5a24IH4qtoetIMe0aU1eCrl0KSbxPCgF7nHbDuoiuDT2luMLmCZUDjZbPivIIM5dUrCPzTcU8Ly69wl5efX9OCLu6VV24Q5l732n2kW73feXmkhPyDvaEzRffW/ynxp1SEvDhuAM+uT8AOY8b75f/cKEnThI89aq7zzPC7cb+3p0W054R1kfGV0HvVCgXInTe3jct2ngjSUxn2DHMq+vYwvyW2ZYYmqhwsWrv30WxrX7Kzh9w6yE8MuqawWv1Drerv3IXuojoN1yE049UIX6FVZWFOp7l9yzOgvsW9gV0dOtOH6CL79yBO4iO0g01dqf7/3HFmVwakH94zuw77eK37EwU7hx2+cXf8unbtxfHweshYjPal0WY9fpol35PtrEp1CJIqCvjTvwSvLcAl9l41BtNq4zj5dNGZfPjPBL2Bcoaob5P39XCG+4wZy5dKkRp7hN+4IhT2X9fQveaQF/8b7LttU0r2Kvjt1CER+1uO02Kyelf0Jl/IVOL1q+6slf+aJF3fu6S7giJIvsOZgoIX+YMmT3/HVXYMGd8XTPGseqc17WbrfJwj4JvO/l9ivCmcMXRyfS4O43i+vWiQ3W7xpQQEwEbzXbeG6YEv7/n/df9mH6yvhP42+r8FdfvQFMDfldoUGuNfPR1OJHHwZNcYBZpvvxYvQtKnpGKSemnZ+2QU+QW3goj8vZd8wac/BQJyL44WPy7FwkRwhZv7uL7ye7ru5MMyvaaWWrk/arFyw/tg5vhSVzDg/yIN6nv1lSsPc1j/ulPBcVN3vexvZaBPzoHeZnKE9sEQhyj/220qCBTjsQicIrBUvfmj37F+3029gru/d/P67cJR2l7mTf7y9yLVYpYrKGrjMAvye2n9OJmPn36gue7wn4TMXJfTv+Us9V68S9/69/bq8siDZb1bto9mHuV/vFaWvTKW99jSby2l1+W94Q8ERnV70ujwkJbtJbY8h7F6Ti6ve+jIXd3539a66/rbi+jayf08m4Liu/d/d4JMv69V/CPgnvK+8v//BEQvf7dloqLX4IhpNbyNr1IKe9a9db9OYgJt2d6XUEfdG+qy9eTCWkUmf68rZ0J3P5PVfdMh9p+rlOt3W9F+2/++k1LdeT1Vp6Udu95EDuEvFkUl5snpZ0Y3vd3CXj5g+Z9+G6evGZ9eiHBYId7gn7ZN93dO+6yxJ3ve5c+Qu4w7ui90sta5PaxJOiRRHB2H34Yg/PtfBN7vsnLwW0KZkLzw0r0/sJl2
n5f39Pr7E6WoiTuQKckt9wt6nBWSC6+8ZXaf70lk9EmPe9rfl86T+GPRCJ6kqTNrfEFLQ/l+MsS3f8FcAAABG1Bm4AV8Fi1cowEGzP13+bPpx5gkvF+eOYVUMl/9wWGsiiLBco9CfOTU5DZfPP9MMv9vKOvcSNanGtnvOL02dtZPu3E9wUULLhjL3M2T6fLUv8pT6v8GHdmao7vC7eb/XuXO4WXuMEZmuEvgSAEH7tV/3AHPV+zx/riSHYFGBHt/GDqLgiakOAduY2l4bZ+61dcvv3jp7ZOLtpLGQcgRlMuy/v3G/CdSbIn5SMHHfKjkskb+4WkLdPMq38n9a5YSKYeKGZGQ0gwC+mnSReIlhC5AVl8n6S15odWp/Sntvcoa5P1dTpxMg5u54fLLd73r/lPuiCb+wWDBIPFGKMVy07G6hWlEiJXX4ynRKBGxLDqaZCEX3VZaUHodgjom3hVv/3a9nw6aswqiLnZxnIsfN+X37wlfc1t7f25/9yv4ySPch+/OqKneif4gsOQ7j1R3S6WTmzLX+WG0kP69V4okPL7vaAJddpc7v2htXi/L8AvVTrf6BdE8uP3PF/v8LbJHeVQPgqLh2XY/CXgiJd3ff4J4cRdzzZflXZ8f+9w4Ro3BCVDI+my8JOv6LNhlKDTXv74woyicECL86h3ltT++tU6XkPob2BmUFffQ3x93jWP5eH750JYG7vfR4Lr0+dRL6q+nJjSZ9pXQKefSX42LjNtA8Zy75PVJd2wqRxfUovUaFSxnrnov9qVCCPVPl/y8wsr9wkuWIBEZqr91olb1wT5X0tXQs+BFiMf3CxMaC67pw4xEHSLIvHfq+Sf3XYLBb3vs7u509wRiMMMweD8EM//fgkPNv/WvutwUzrkq720mMJeCvUoLMf/HkYs0gcDQwzhRF3pkOf19AjE6R/LyS3vCHmx2mj+Cs1OnZNI6NvJ6praSv9haHk+Xyr0X95H/0euW+36PJeRdDl9F/BEUlvd/wXYQ8rke7u7dOfr2gWTD8YO9xmp7eHihG9/wQ2opkbu/BJ03lCT6bFGeN9t27paBWV9IjNxu979FZDuvek5DZc15LFu99S9idKqEyiLv2LZCPvJ92mN9FK9/aVx8Ru7ufp8vta4KN7u7y5+Jn/P+EPGE3bfbHVa/L4qw30d1kIE92kz+5cS7L/BIJd0bmF1IIu56W7v77rXstE/0T1QKIRceJHu7u7fl7v8UV3u930eE93u9wj4JyXeXy/VbYSvu8V6xHKNd3L9j3trLIIe/Y2CPPeW73s76/Eexcpr33k8/07Lu968I+GpnlyFW4cS/kEZun3itK73faZim1fRpMuP0lo3e8/b9ZM/9P5f11m82fCXknDNx1HgrkuyZTCXgul4VVpVrDwtFxOHcSIdw4uJ2154XlcxTvfs7IXM4hbhD+Y737L7+9ESrdfkNDS2Xl9J1V0cIz9KFF/rZdGy14kT2Efd+M0776ERAm99x6G+blh6spfwstGwmZ3euXeZfhcTd63dfL0/7vZ7Uto17/QJLvcqZPXqqUstD/y+q/C/qdPyTF2fyREmeGlSoay+omqifidzkfiJMl4LIAAABWVBm6AV8FfixWaIJ/3oCFlIjqdK1Ooo6NfNGFYidWg/i5xonYRjqdzQNL/uWY0vs/hE+WnMwYVMFbpfm4+XzCEL+HCU4QPLnP8PvfG+GXLlJIkyMPnHNfrHhuc2brVZidKjja5I65Ct3Vh0USEocfkOfFbl+42bvS2FfZ2rtKfrcUQIvXaYpH3y5n/mj2snb96lh3cfKCab2O5MKERf1c3rvG8oyHW87CVvCRS0v3PcL6ruF2+bccLMfHZWpz9v9Fgg5moj+2h71hfUqOH5kETCQ4mp7+18uaHL/iW0WfN/ieTL6/KU8nD1PnaFPMK4dVa/cIkdILKAS/+vd3Lx2MSssGbyImbdf90XaGLr+CvxArwuhUF1DnX513Ds5doouoot9uI38CfXMfr65
X3CJZYEXYGjhchyWDwFuWyfrq+I8+TLzEO/3NHQ8sQfT+S+b6+uiyb39lPmZO4J+MFXhBxLtUsrRjdH0bHq0oQva85JEo1sPVV/tbZYye+TtKETudZuJ4E2Wn4Gw6gi+iWt2dP02YS9WvHbovTn4zni+3CW7xCPaP3l/odvF0kVNjSki1ZYJsvdqfSCfxMHkpnYC9yF9X58JfLX+myc+6V16cVe996c8ntIRvPrv6ptMI4bRfv4ag2TitsI6xyYxO6WhhOcfP5firil8zv3v27dyoO9rLkkNPbguEy/vZYUM9Lvs4Ns/2nvs20cpWF+u+GmWyFlUeHZeAn1PtfrG8rHIGwivdoBB/p8dH3Iq9yCE+33shlf/fzy0Km8tOSjy+ZbeCzx2Sr8m8fvZ9p5NmixGYX0OLz0c7Teh/76ChuTh9i4+CH8/g93LBQF21NL1AQ/+5tqGqt9nlzabOnCghonn4f2m3qi0TStGiLLvW0M2n9xpMIO29sgUMgf5eXw3F0l4PrJOE/X5l/8RBaXP9711ojunDk5WVIGLqvm53fJ7bQnukCsibWaSkrYXaD8iNnTLy3rXdCRp0kfy+Mb5P66z0KcfDEOy+v7y3GvP1/YuCM7LmXyyf1+eWzf6grhih+5ymmX9aSLoWa4BNVvZAvnWg/zf9KpJhPLGT10qyTU9wj4IxF35b8RKdab2uq1q0hcFcP8WikzUCJILo6Y3Bl/DTZsduC2pTmgO77EI+ZOjsRWuDpb3Xgi3dpsn66n19aVtsf5lw9f5VtGwcjsH/ghpr7S57dzan5JfvCa+gS4+vtN3buyFL/J7af7+M+19giI92u63xux0ubXguhmcnYXlT3yyf1+Un0W+nXVq6EnssgzdwmyHlu9uVEK7c6NHFpk+i3RT+mvl3f1+xWXHspq7rXfvTrs18J+fPZbv9lICHna4wU1ebCD+w9vU+Y3L9/iZ+t0MV9IERr3sfiynpd73rKsw3JV63a/HCLx3pc+7XsdfayRzEb+wT+Wj2X6XeyP2PRiO+lai0Ms7N294+SnD0O97+TrhHwTy87N46vrL7zl0URbit75sJnYj997+4JDt1pVX4m73e737TOYy3J/0gtO+547v1yb/37lZJsvv6MtvTfRWCzw/ve4y0PduVNLiG99wk/wVEO3jInN/j5js/IDe6YKDO99N29Ip3u9XjZD46tkW6Rb31rSpluXqxRpcnHoyH632qWJmDjL/fbx7hTJeP/f5IRz1verbr6YIxLxLlKu4Ju08d90qq2hM177q2glLjPWduT1tn8TLPnr0xO9LLDXZKlTS+Xl/7hp3h7twr4extfkOXjRYe/4dj8vrpOONm1Lu71Gn13/kEzduD5Lugg96Satz9FyfbllpkgkruF/fjobWyiaJKfFLlxxOkHFqNfwx4IppivInkkIkq37O+MBeBZAAAAFY0GbwBXwWeGxBqKw00z+bY/Oy/+4shP7uXF5pi9DLgY83OfDd3PwUEkBqfsjV2YJOKwEvvN49wvmb0tjy2X93wjozgxh3t8qZN+B93qL63cKFbeA3NG/T0gSe/XDsfssju44vvcIEx0SGqUxdf867yfvufYL97mPu2euK+H/vYmHSufDba6kd5oMrTH2p/7gnz9HE7u4rtekH5b5qbngK3uLU+nNh6hXzGpD84wPy/2240m5eFxXWyHue4XUeSC6Md2CSaAznK+bmb8kWjkKu3FnwMi0DMXRAheqs5r9bbQUpUm1Z44xC7+6lK/OFxD+j7GT93vUUDvIjTDJ7udruHfSmTwBu76FW43utNyrfj/9cn1plaTjC6t1uD14INzZ4aiOQismX7D3uCnoJ+8OH/utgf0vWOpa793ljJXk9NLv+iwnL7d79iZPPOr0he4Zcm7z8KeERGG7EaC02PP3pHgSPOzFXa6bCk2747s3mMSS0PGpBzIz6S4A8cof7LQMD7VGxJxRda0o2T6S29y1t5IdwgUoFbzAUNZtdinlC
5kEn9/i2be6r6yd316o3BZpPykjj5WW0vgMf5P2ypvdwUEEIAd5F/PDfS2FqVZdyFXBcJeCfDLlFxfZRRj93wKnL+34JyXZ43GTg6lO73Bhvu1AVURAfxUd6OIfw3rL/Gcszisg/Dcn5gNmHno4X0Uu9Kd4ILsVB+Ge1PCt0ZFW69+2m+5bU+f8Jn4bXGXs69wVEmXw02o3efv+Vj/YmExb/y291XuicYIMyiQ5f1B9t63xtA+p7byBn4ICJJOgzeRN+w3ULaKWvngIdcf/hc6KaclZZ+nzkNKJSQVy/5dFFsiRwiX/1BEK0z/sfr2ki3BP4ZgSSNNy926zsKcdbP7kBpV+4hFtg6H0XJw7GsFxgboToCEny5D+CGvVjbGWPaZ7YSEVSSygs75PTojxSwYbw9hswfvX1k/qixdi+T0leskdD8tz28w/eXCne2haLBWSFyPxwvc75I++wTCc6jRXT+SMvfc8crF+pcCL6TBcIfJ8frkqfTLw1Wa1b3DsEmVd9u7JXe7Yg39x6TkfRP/6WVVhqst1Jul+tW6wRXIrfZ20ssnuknkqCLbRDBUd3gl903d91eCze7u9vfKEfHGNWXj9K19eu78sEZU5O71R9k+sX80FJXS3KGu7WvJH7ru7V7y+teS7+vJ7SZU/j5Rxxh9WS/LANxXbfwhffe97/Fbd3vv8FF993ZCPhQ3P3xpXU7U3bHcy/yZZr3qve+YXSd+3VF9aJbr6xcYGb3vJOlURBFM67sp/gntv3vUJdgi3V3N9UW+80cyYw/RQiKD42llj/DpWjdqoPVYS1X37ye6QspIlZB00n/Sp0vBJzwlB6ku9+pN76wSEggvfu352/oMFk7lLQYe1Dv340viq1TuCsruYp7u7u5zx7euEbZtbu8ZpeOEfDRLsY3cf5lzFaO/yij+/liTuRd7P93vyxAl+13fqa73k/onf15nrUst39H9l6bfNfdJdS3C6eA/y/rSmBDQe6Z6EvCZC8/zw95aYUM7u6b5/u9zr7+2CM7xW57emqOn1Tv2LWvyXd+vSWRaollFFd937UtoEQqNo3sXj3GZGz/j/u8Q99zvu/khLyFhVq8vrf70Ty+UnOpCiH0W7M0UEnd0w+8nvi11go7ve5xb9oZe/d93yw35JL3/BbmfBI9O62mjJpJJIdP/2p+8sH763vUK+fU1p/9Ixkl+4S3vuXdi0Y+7rdSpk9fL0hZcuXu8v9bQ/u0f30cfnPhj0Q6eSGu7u+fWvmgljxEj+a007WIiCohuWTd1p6ExsP/FwAAAFhUGb4BXwWP3FiIfzZetjjMZcH31+UReY/L/vizx8sPs4MfDK9wibjoQgBBvB+4eOu5XqIOvbTwQvSlX2M7ip23c0cw+vxfDuy3e6h8r6UUU3vlqWMFtJf6cFZMJODEtJF5GIfU7jazhRdOWcJ8wer+QtNfcE0/0dy5ozr6BVvJLCXc8+YraFfMS5GpYj1/jSVuJHhdkCX/nzG4/LgR+9etCFtIhk+6G16eZHJ+hreKx6ZOPqC/GT5aCSinHked/h2ez1aOmHLPgIrmPleIlcTgiIPwC0lfH736e4XasXYstrH7QexMdNtK6la4AEfr3Yuq89Ts68GE3Tf/jC7QaQXprNfRGDxjy7eQP3emlaNIiEYL1WVh6YzY4hcZlkViV85SHmCToxB/+b08vq6eM0r5t7qj7HAer8nvYu6qKsrxX6bOsIR2x1lc+Ytzw/GXd5e2XN37mE994SveZDvL/vizuO+rY/ot6wm/cKBDn0Lvu7H5HSJlyVJCQfvrfuNw98Jry9ZaFMRMkSw+WoR5g7KPWBB0J6eQPKPdyBeNx3RPHkxsR7ul8dE+vh+zFnuJP4F0/x8utFYkoXi3uOQNhnNbS0npLXk9UL8uPdFOT0vXXosT3c6d+s197VXH1fe8innBsOxenRUCwkzL4ROCy+ebWPPgl2PtrYVSqXd2IJvvChHu7wSdn9Tbd9lh17hyHv+IL70
+3R14a7/9Y3xkuvml2Xhx10LfDkL2V/1ZEJhTu64cz2wF9blr6GGUB7p7j9Xizv4bSbccfswH431os1/UFxBl0VoJyr4bTmt5JRemsOn7+owQhYSa8Z31arsPU0Wa7jrg1OLyelT+40kz95mpfe+iQVnjzhqXjT2BnJzLVGr2GJz/uJtz2T8n9dtPCPgjyd/5rXBaWeMvVJdfQLJYwadEB74iS06z0NMPvkEw17jfhP3MEf/jM4CXa2Rq1ASjcA9zj7V4RWZ0Me2HEpt1VHvBWIQl5Nq73JdwF96NDTQSfBFhGTU4hvKqbCm7xIkoanzprP/BOS++clcy/cP2Du9OMCPy6nUa/oTBGXDcoPi7GyyF7f46Hx3nx8jvjqmPYJP0u68FBjhyH+dN4I3idpTzoHNfRBL3e+sT3eXvhHziF9u63y5i5/rrBHLhT939QVwmf8zV637CsOBW0ZTvljsarEwRXs/OTw6PBDTMywPspLur7XNFSI78sMnv/uCO78WrxsEXJZwvFr2QEM35m6LguZv+Ey/yttAsvexu93e1E/r7ElV+sSVbzv+v8kt5a9RHOHXeZPe+CO73Tvbu738v0Ett93+/oI+K93u7fxM/8/8ILlxQi3P9ye98f3be96VctWNfRZz9y6ev17gi4wD0zCp2627UnLjye3yYp5rul+O3uOr+9+/5ufwj4Ks0e4T4/RN7Oz1FZ/GabtfE+255vy/yZZAifH944dzwvfn+rLu76cm7vyQuW8f0d0aa7nv6pxMRup67R4fcu79jYoRnSPCH3hZd1uLKkZYS+aF0JpIr3S+y/Ty/rfCPgiz+5UrlBQS7vbd99dyFEjF297SENzXe1SdNdW9u+vJ6vSlrtSk/XsWnvfJ/XW5O5QV1eNnENbo8JeC4hFnhpUMJbpwl/6wkbbeXHu6LoSfLjb3fWQ+csUlS19l5PdPHVfVBiLm5f7fXtK6FEGScxBmJOrr9/ojIUyIUUJ6gxx63l8RrkIRu21J6+aZOEBu5GU4tHlyT9SfSVMr1MRTC69aSYIdzoMXptxNQj1fCPwZd36kLdKjrdLWu56bhTyYhprwQiIbOl05rrLK42pdKS4WO98Z7qftf0nl9EiyvKS6rfmmrrXLj71tYL/KPP9blMk3hf0QifqnVV4ikiFGF2Id9+snw98XAAABYJBmgAV8FfmFZMMDvi6nd9J5f/cI93Cbz2+YuUUdn+LNunjMR/MfDTmYY8xOCTs3tS//Y3yTjUs82wGckjM+ctN0mCRqUHB7c95v+w3Myxm7oI9tC8v7bWN3Ty+TTlYfSlZpr7HnF7uv3bTtStx/bl3x3L8n7ep9i4BX1F7O/mllPbEx0j9HZjZV97QiGYIWT6rfLCJYpoXjuJQ35P19XG7kFqc2ulYzL5mv162PvTDWeX/KTdb38SUpi7jMta8Qp5jZgM8v7t4ICCRxtezqQB//pV136C4KkrycS4RkpunzzNPuWc1P7w2Un7nVPOzjxzmoffeN5/w9R9WV7/KWMPysTvcKMXsUCUexn5dJ9j/fbQJY6TF3MgMXACitXrseQX407w6+TRS8dC1WbWCz7Qrnrp5fWN/XjsjPlT8c5aMlDJQ23f8O7MxRYLcD3FcUH96S+Li1tT0/u9xl4bymx+px1dyA6+84EQJLHp80fgTm3KUjOw7sIPPzF2Yfuiet8bD0sjyJRXdvNt+Wx9sLF9uV/fRYvt4dWkcubbfD4n2MlZpWs2PF/lK8zYS8wzhN9lL/e2KIJepLCPmw0/Z+disJr9nxl9wce/1imeVDGPkH9mfgDi8MiP/1y3EfKwtH+hQ3BRrU0h/SX93wR1oaP0w/f81k/qvKxZUePOsOr+1/qtx/DUuPM+NyfvKBro+k0dFhi91zC4AtvVRv+EeaVON9Te4Rw4Jd/AS13fUcGm+X7mB6O8BL7H2077je4/Ybj90aXYty4C7ndr79+3ZxrTzTcJeCwz3end7X++6CmVFOgde+Jn9P
XvG+2Rd6vt1ngw6xr4wKnXgYrEY+T9afxsNzSfjLC1bjTTcIw1fScK+HaKjtHVmnJ8f9Ru9YKaZUyARMxxYie4CF9+UkIX6Y0w943s+Ky6PFX4+su1pdx58wazVoD+RHk/0Rfz76ClA9d5C8n6vlahMw20fLWGkRhwFr6KJwIZ7GeXEqcRd/jfSelVa4KxD85OWdzX7Pcbiz0Y71tDL/NXUVhIi4Ydo2cSDsxL/Rc/G+ob7m2d2oy+D+RtfTCXgh7m9j8EV0C5f7S5WTAGyBLclnH8FBMPJI8j8Ap/il81eNY0i0RbVRgZaOcMJI+QM7B2Img3sL4EH4yRRm7vFBzv+UWG3Re/7ca2zGe/QmH+v7zkaUoXXUJ/pf/Yn2oKDm0ZA+9veLV3QehPVpsT3dHbfKF1H+/9JF0ERHci15zwCB46lBlLF/2shSghEvvLL795svwj5TEb0ujvJ6ud4tlqzX5uPIv7BMUELxmJ+7+7XuQnOv6BWd3u897RVHNRfWi8EV3yl3giI7CnB2oJL7y7JLe/TJCPhMkzZ/k2rwSCSS7iT3T8nNZs7161pKO34u5+96b6PNfdE9JIjrVE7xDhPwWy/l584NZeE93l7905CCS7uff0X1hMo/332fXe26N36v1r/68kEeG1vPerFkXtfIvfJCHr+uwW3ve995kfW6v61rW5iRtr6ayopVSS0twn4XLP7xnHWyhp99e2LEK1tWbLd++xMFZ3uZu6J75svvS2CQS+6ZV67rPon0uf2pwfl7t0u/RyKl9vVW48jvef48XvfWJz/d8JdB/hHbk62ct+HB9Rt9DqHlKfDJSPH0SX8l8ogV3y/dMWkQ+T+oKTp35Epyb3p+xu+6bXmK3vWnQLBFKULvvINPgtG0loExAnsMGfvvdnywl5Cy+iZfuiiXKXLHr1UW15FvckRy5e+/S7zT/0k9G3P8Ll8yX2TkxblEu7v1BHbp02X8nJLNn5Iwtp5te8rG45b7dRRQUkkt9p/d6L4YrUidYjPGYiS2D/JBFRh5edZhLL+NkkjtzX3N5rOZff5Lo34LIAAAAXJQZogFfBX4KBWa49KdDoxXxcbiszmsekYNdNS/+4R7vhvLbraR90X/fLDjNBraO71y8tmeGF7hE2rgEGvBv7iqvl8ZbatmdfX4u44Xq9UEvke6tysPFMO6ZZM7R5SrLG1iVuT/2JjYz583SsCm058uAQOlz+NTZP53S4E5bTAGuQSFhfvCREm+3YcMOK1LCB1n1XPzBWxuS/8bOb1fCivhjj8x0qmB7KnTHLf7iCF5peXlz5SjmL4U8xs5iEnce/xpLKEX8BcSETMkp/VBffv+ae3AYAYBe3Uze3rMOv6UVbL+00BrMN4UKFmqphlZ+M3VieC8zvWXWH18ICj3X1b7c8yhjzJB6F7c26dsaUPLou7BTaNLBWvYBecbeKP389ifYQ4DsuruGjzhd4yJWTi6M6cYws/pDPDSXS2mU+OiWx6WQRFuU7/YxbknvBuT1pMnaE89Y99xSFye6v+O7veWBTu+T7az9R3SMDsh+X5yr95eJxvPcrHL65dGvMZ8svn4T89eGXuGbmL9xpII9pkC2/qpowfAaqxdNP59Y7XxAm17aqaPu8dmZPObY8eHICZfrPWfZHZ6G5qPh1u420Nes2PCuOJ7Xh0/Z2i75O23wCeu9vf7iIi3BlQa/52oPUFBHXgr84PHUTNcL2v9AdgjtYSfMRqDyadumm4m73G7sEttyP1PNxvaoE70FZAfPDAymOvr7/Bny/ivEmyx0sy8/g7yyR5xZ56mq08aUgfVZsIy/iTkaLnwT/Sq3BRiM2N31fjEaLgtrWzvwvQ469eLVqtD8ho62Ma/8O7mnxQMFFWf0T0TQ9y7vy0FeWVSsy//bGbvnnpnx8uY611gs3It6NfI6zn3yiiVUniZZjxf5PvJE3O9qVYKCbkBk68BPdVyHt+425t5oHn70rO4Z6xKXQFlvRlOGBvISPfe
n9io1pw9R/cNvlO/v8TLlhMiu8PlzwFt19fvoS84i39k179QSEcve63TZXus8IXC7QXMIj85SRrcwrzevcZzCjbzn3D2Y7hmXbu+Qc33y/94SE8N9+7u9/jybw0cH0dj98Zpfqz9xaJ+vv6XUEJioLj7Sw+NI9Xbi7/wgm5JdASmex5tqWf3qRnh/wz/AnhLy7n1GNP/8WNn8fgGzYSL/eIgjEcvrovpwVVAnf3IN6WBL1/3/zfbQwQ3Va3pOz94W4SOP09/I2807N319hRO92Jb7HQ6fxJL3zIXfQSO+9yLdVlhCHoMdrm/w7J/3n+ioWIeVuiIY6RC/T/hLyl5c02OPlzdbv7OxIgaTfJWjm3R5OnZb63aPvrwRFuWH/o8RIfyhqQPXTTc8EV+X1ptcEN9/Ja3kLuu62ZYtVoRCHgmNzrp2+t1Sgj3W+1LMvoE4nI+99S1m3vrFd3ve072RZf30+yEhHwS7k252U+W+zx/Nu9+72q4SPd3d7Xv16P/E73d36ckqB38nqn3TvI/ugrvef62z//ahG933fd9YQ3nX3d9wj4qySL2xn8HX1pAjiX3mBVWS5JevrLcpr69X6XdmETD99GYQLd7aDbo2WraL9t2rK++sm9wj4ItuXorVdYKTXvz7l99fjju9vn9zY0g1Ksos8EZzqOK5Vqndu+xL177tR/S4QvKx6+2TJ6VdDSPk9tq/JBZ4ZX8XvAOuuZx/F6ZPVW45it47TsJeKI2pc3mKnh2wobc7N3j9Rls32hXKtsbbL65OQ813vlkHnd/csH3fTvjBQaf9ZO/CAvSppAtIQKDSuUYvU3xfazNR5t4k3u7u/ZCwn4Je7YNa5/L8krGX9Si8Vn6VBF6lSifv2N/iflk3od5Kwt4VuQU5LyC8N1P5IwmSFJ7Rjk+cSycvlSWmEO7rH1/VOknLCRyX3d/J0uSIKfF0TpPXRYzPb93FbVK+a+RDMC1aO3CGibZKl8Lt/RLnE1lwx5DKb/JE17ey+7f9zyZ/yHZBrCX6QicwAd/45vY68PfGwAAAT/QZpAFfBcqyzBCEVwD/DB+GX+3h5dZr+OmtMMeYmXmBXwQcMs3CbX8wguYGhC/4E19pcgrDAY5//gn+XLgdQzLESszDIbkGMDkpVu42Cd+wp9l9AlL4rWhc/3F/DiT2ozRVnvVunFuOZiljdH9OduMzwjOOk8v2R6vn2UKP16h3MOkF2QZomVeuPsfIV63DP39J54JuU+5wIpNDrtEx7hDe756F7/lhPu5f5f+sVzyvuFfDnDVz6BJjYcEJ2dQ2BC9UV1Nt+4IDCR5zgIX1MB//P7b0IuxAQ7+0A5efKxvXVBtlY2ETvLzq827q+dwVSj13/37jfrfW20X1NHd+P5nQ7iXovR6gwSvQlkH/wC3MVn94oKVj8Y3H5f/saV6Xg7fPwAD1Uq9jb+9PeCC2Bt6VdRpl5l6HX9Ufq2PFqav9pN0EPpr4XuVE5Xfb2BlBLmT5pfMgP7VO6j9tCcCX8d7znR/5YdYiwDJ0f3vJ6aWddmhtyf6hCIgBX3U/b35WP7uXuQMn3oqVtIJx6XuPd3+U7x3CnMJv7FBC3uc7uPHGt3GeJ5iP5EMHavZsRU9FrjYlp9gLed/LxEiKd7vjFzisJE+l3uENJ4oItyy+I2vzkvhMsykZDZo73d9+teEyXvhqSIVYUqvVoFECzY5Z5/Sl7HgHVTJa2F/gj7jJ4YBMv75WCk04m/dj8EuieSfb2rC+lun4JelsOKbKLw3b/jKf7Gly2YfSpcIge6CMjGJzzI4Vw9d4LBXpj2SS4fX+X97sKbjeHrnHyRIfj7F7cEcc6gBI91f4Qy/IDxrrmSfnbf/91nmKPuv3qnceQiqZONauryNzbosSc8O9cBvryXMgj3SfVZb4ITOQrV9tawVkcu1i+SXAfbKw/dFvNO1PL/vRRrosI+CYVP0/kz5+CK7/aUrPWDpwUXkC6TmPQ0npB2LgqMCF7GW7WN+
Fs7d41mBbw8hpMNt9CRNzDuIG4zn+qbYumrJC29+PFdZIrX8ntpV/pvde2/r3TuAi/wr7h/rFmmmEPbxt2/gOpGTR5s2b+tH7a1QJN79CPQwj1VP/ezvdflw90v1KJP/uwTCJBqHJYmDm1mkycmviPzLr6wRlP//xUt9Fpwlfd9vZ4Iu7mbbStjvvIcPuk+j1nv0xW3W99KQl7hEv9xVgr8/3drfvwWXLHN9d72/6MgVnW+Xfd666s/rROrfybXIuE2UPqY9/vLD4Q8E5L3Y3Drh61l4y73e1N9K5+y5PWi/ZSiSmt9NijXvd32dM6d9uXYIyUcunsnrRpPgkvfXY36+5Cu/L769eEN75Nu/3d9+8IeCIiF1H7U/Ezb3WvRdX94JO7ud/ZECQkDtlz+76fqz3eEfDWViMHFfh22nl/3UFJNz7t4rd3xaxMsSUVxLBy5vrIQsEAl7l77v3TLTP/2L6pSXu/fk9OzPCHCd7niObWlr9NYi6nX9k9CGCLd5wd16hMl7gUXg457fq4XmNgbhLUJ4Sp+h8q8uEdmdp8bpXZWFBDu/NK5DV3b3fJ60XZl7UhTSDXeYL93e9D107/xHycn9flRCVjIg5P1pVUMkBLt+K9HjQaPQytr/wotcRe7jJxv37YrLy/fW/JCAk9WmUuve75Prl/VKuV9CfYn1k7lrrd38nrWIfFMmXP2cvHa+FdJGOKs0pMuPeXlOu+8vLHvUqeI17Pd4X9GOlZr1RQXQAAABS5BmmAV8FfmFGEQ3oGGX/Nkk1F8L93zWXFy9P69/xfKNtO1Dk64MeYmMIb8l/9wxCbq4FNN/GytRUoNDUP4Ogza/yblpL+/YUlBV7cORmcw0zIz5/++1SRWuhSD1jYvf3DEM9gJf7RrP/cpXf+H/8yfbdlu4L6W5OjFYS0HoItX2zxvl/gh6qnH77ZTfyluHnW4U8xtJ69sEBMBI+s3bgTUb9AFDusx1dgRoeBbsAmP9/9dY7jF1+3qUDxU1f4kNqZaRcMtq/tjY3rhCe5+haFoIfh+4XMK1/FKXgr0D2yn7hBg+1kEwjL7vYI/17QV35t4AZqqmf37+Wbb/3CJThuOkiBSh06XswXEU+mnLHyrdy773KF9YTgfI/68+tN9ue5LuU17EoTu0baMrV1K6bBgUIv1eWLaJ8rrX5fk3wTUKPd9hHbyHxTflvQh+gS8OG4e5a/xzr+gWECdNSI7lB4wPGTQO1FtloxJETInLzprpxuV9SGbVYGz57NQfeW5lDVQaAaxm/s5ux9WQe1LhrBpJerELQleKx3/882v557szpVRq5nPrYv9+mN5PrJ6H5zHNtD85SReBft5Wiv7rmVDP/scGJn1l2N/k9JIrroYUJMN9+3muVG74VcXFxYD1kfYM7bS2mHvG8rLIpwkcRPbW9Yeln/5PpJP3NyFH5KnusFeSRd25UB5ctR29xF7420+T7vOtwiS99ShSYDwCHfu4rpsbbci/Flgo3AJ9DF7/h8k3VuF3W5G8reLUGl7lFQ7fn9eoI7kXtwRPQnuFBT3ve97u+sv2u4Rp97y97ToBf56K/uCOVD2CEMeb6CncJe4n5+ik26SI54Rd+w9sn1e6i2MEjb+3bzzGUfzN7d5PX/wmZyTzl+djY2znh2ki2gXG5alcZ/2T9fXHkfLYLyIPcnMHcdKUYT9Fqi/+2CKUL3O8JoJe593jSi81zrzFJgbuggbK6Ih2Ymceo8P58IGzzLuGxI26POka4z3/xV63OXb2/2RG+38pTbMGyL3tJrW07LBDhjffc9xpBve1S7k2L/gQ+e55QiFkS/XCW3HOd/0eLPe+5Y/BTe8z9DfeoR8FBibfoauET+ssrfJ9uT7gmyD8wXd5Tc0+4Jy3I8fonexH7Oj3sQa5Osn7Gxd33a7rPW3WKz/bf04Svu9/da9X4A5mAt11kv27/9KEcsFNMQPdXk+73vWRL369l/8MyHc229qMspdx02a2i/aM6+y0I7r6
2fLDJ7VC0+VGI5dvsauyIRe999UEynh59dJPpQg+1BaZ75/kvJJd79sOCX31qqfy/3qQh//sr0X4Q3j/7tmkyevakiLvfS+CO7z8oOvxEJ7t3fpLS1kqMvfBE2LZ4y6N3hxoulbUqZPuv3Bbz+dCeGehHw4SVifv/UkK8QOHavULOLnznefjS3Rj6I/7Khf7Fbu0kUzrxnvk97GrDAJ560x1lFedi9xXk+tT+xsqM1Or9lJVy53PttrWuulJu/2CEz712Lgruc3hSA/3GWNsKX3TlPu/y3v8kIeQkbuf4IzHQ3SfywTFe7u73+95V1uQTq/XXv76p/zesUTdw8wjt/u7lD9hLx+8vxnPssZxHpixEbnf5oy/eblEu/VHXn6q1ul6ae97yPRu8hYTX4gS7vZsjf4i7tS+1v8ccuxXL7uf+6UnaXKYuT7zNd+SyPf33X6Vo0ZZ13P7vd/v3/Cyy9iHafyIu27695v5ogXN/Sfkmufy5qh933l7u+GvITkxeILK3N+hMfD/xcAAAVUQZqAFfBZ5hSu9eYz5Y/MV5GYY8OZwKOhUITawoKnji/L/7hElYcW4mHQIH/kcWEXL3r3CkCd23jv3cxJ+Y6HtFVudeyg6LCBQn8wjaZ5IB6s1fk/b8Tsbua86Hdo+CIflTPPYF8EV8bZQHcmhn7xLeregWkOLb9zKSp+Qs7B4eix88Xz6/NG8v/JgwlvmnDVuYwS1u6f/Lu8KeYm4QsPcl/d8EBCUecsAEX2aClC+VYoxWQea3YxP3o7a2CWqwQX6xen6ftYL+jRBNtHxnO+P3K69dDhB+i5iC0Of3Re8NM83TbYq8Am/qcedkfWULXvpwUl4Q/tE6riVR076zwwZPPm6S3L5ecG1siHTCTWcL9r2gAn977/65uevf8f4karfOLTh+HIuvduLlhO7OyilmSWssEmdM0tKHwnyt5evw2fNqS4r+Ey/3tjRUdkseZrGFUeejGZy98duH1U4Txzs94b7BTE6VWXBwkZlcwNJT2MI5ems/naDODxHrfY9Of+t1pb6VDISmn87dIzehxDCdiTu7XiEby75zH8vv3hQsVvlzL/YHsE3D8H1Lkbve0htKX9dY3Po+MBpTOX6vxk60gCnLOHcq/ODg4uLOY5L9e4RKkOLcKmBz7uv0tLfCftvnSGCkn9/jZeQbGta9xcdV6+w5c14s17vC8HoInXH3uO4yYJw07xvOukHbFyabULc0f8A9tZPym9fWH+73sBHt9Tz+OrxpE/CXgnMaQUbDv58qP/ffuN3XZ0BbPe/gSP3vj2n5UD/UcW7jGM/W1jf+ysaX8cD/x8o+AK92KLGcvcCrTS430npJl3nh3uOmNifWGGYj3H/BAKnOW8EuNFLI/yfpXZ9De2cLyEqhqIq4OIcxKxJMghcbztpxzlUFQfL++o4uHoFXvh5sPRA9+4ghxwsAK++f3f4koRul/wR/HjtO8SuiobMvY7q1m9y9mPDdTfuREBGvv0rKZevdqdjDv55/TQnYwR6RXVqw06wV+cy1reuHXkhpe2kt2B/Kv2pehp6EvHCJ47Ipa3609YIr6PGTAh/X0+27cFkegO7eHJOloXoEP4838hVXG8ICKxC/l/3qNPH65zaA9wycZOUWyt5TjgXd8uj+sLU2+7zLw3LyYtfLd6sXElffIvk9pur0ngLVdvvto6oJiH7uceNDrNcok6/vfhDzacvS4RLKR95PtfE/toP3cBLXd3+zcrHnOC32dTw/il9glwjwHyQH93fXTQJd75Q279dUSLK992PpIEEvfe7yqzV1rNP9Yy5L933IFvn9pZF6sz7hHkCma43Sb+rr3Xu/kgjuV/ltsfVHc2pFa66Pdm93rl/93u7rxGxO979SyPQ+X9et7vCPijT+7uV/flKEZ87a976RTCXonpLS6ryXe+8r63+gR7vLZfr068J8vu9mEvFaV7xY/on/Kxh8/d33uzmXf+zrJLqxAi9336Ru73v/lK+
8n1WVJWaGr6fqjtG1q8qBBl+II/yB3eLhD7nv9iSb14R8mViOr8Sbf5//ZSvd9J9OIO73Ov+vqutFShr6L6JJfTtPIXqjGHafk/a9eEvCeHVrOnvXbjjXtvq4QvHD3d5+2q+urL8Z22WT5PVCju/e9LmQ6Nl8Z+/jge7iB4JvnvP/hTURP/LIBBqhv35WKLH6PissOT1pSIiwUHPC5Iu3074i5f+R6SLrJ6aSyLhPniw3FfxEI7nQbv3SprooJrUhN9xWLxC7+wmKtvdby/5PZPT03UcxQvl+RqKX8v8n6ReUkg/y9XsGchD8f+T6Un9lKk8dUM4ixCsK1X7vcuc0Qe6rUuCyAAAAUsQZqgFfBV5jcfnfmNaS/F5qaST5f/c2W3/mIzyNl/94Y8284vL/7hHe5BONRzYQ8bsbCplYwZn5L/uVhTlGVSyh9XyYcGu1JIYgWrp33FZ8fLl/cIXvcrt7mQSfpbl4vDyJado5aXr2sv05Nhgr3LEjT16cf4S/TfCd45U0/ZrCvmJhq8d37jSbIIrOwXFTa1ldmfGzM3W/phPx5IARSnydR3+2JX0W0T8Tw2Zmr/jDG3J1Tz+t3G8a7F9tC+3aAJ/b+b/IOkbSSMUu69fnCtarfAfc8w/kbHLh04+ucvH9Lv3Du5xS2JTkSfYX0oqnIycd61jif/7jCh1w7KKfGeng31cHuMJcO09EQJB+T7r8RdMJeebkD5zw/dUp4cnBpwb9Puv1SeCnw0ksasxaFkifltJH4iUTvvvJ+rX6pXL79f5Tp5RIJv3FDj3LFYrCy0FvDEOnrdxtH3QZbBMH1bALaSUP3frO6cu0woiIa/Vayf4ed8K9EvSs/Y3VsrCR7ztUpXvn/L6b9ifw0pjD9i/0lhvOu/wgVP2LmDJjyfW08oFvnLEC2iwS+a6WTd4YpVKFLqio7k9VzXfauTuqSFXd972rbZhGMIYM9UUtSpoFcOoiW5dzB9im2vqakwBq1TjuzpvtsJc/vKuGpOQl4KCXlg11bfqFN5VJ+yT8WzZeqZd3vrL6dvQIIzJ/DEkn8rQRI6lZeG+Z9/h2YJz2QtkFCSRJYKQZH+n2+0+sDWnbpaHhDccfxt60HRH3NJzK0WZsUH9/hb45SCN6Lb3MX08QEXcnP6xyPXOu91uOKROlYMb5gLzx46vOx5MEvzxUbZ675pXSuJKZj4dgqvZ/TWX/fCliHz/x+AUesqmYS4873Bf7uYX4eELRrTS/7gDV42IWu+bNxsqGNYadx9fwl6sVRZL+4b7K+nBJjTPwZP2/GvG9C1OhW8vDmyBXb6ouQQfNq/W7YDSH8vpR/jbz33liZJT7hvv2v493oi9why/dPL39/qCMsgmNn334JcfTPMO8G3lPv0VArM9gp43U9ZjfrBMdztfff/69CPghI8/39oVe97+2Ehs+42ur9yfba30CUZPt+a8Q0T+re2reoKTkdOp2T2KeNujy72WsJ3L0Fuu8tQnve99ngpnTTILvX77e4+O62A3w7SJdv7lI9/oos/n8I+C4ZdJ3vjd6O51rd+Lnp7v7s+79PrQqvNXuveq6d8J3vfe0lUWW7W96SVN6K4S7BYR93vu+XfSSP0XL76vaVP1rBpcnrNd/WIpXuefpTXf1m3uEfBPL+pWLwdENuhUqGHdI/t6V2Tvcsd1QQmG7y71304VlvlJS461dP9ZPPknqr+pdDcubFom7vvBEa6LWT18nUZu7veIRl9X08OCPH340vpoQfM/eY92x9z/u93P3sI+CI06jc/CRLy98bxL6bkWCstx+ot3nife7ZPVT/Wq/I2d96TxvTXku/SmYgnaXk5PTSy7TKTlyT13X4S8Pdyyz+OrfpwcN/dcs+Uow3NKmnhbvxX2oymUSntcjNc29NCYnk9WlK/005/X5fTLk9X3cnf4IST8g3eUKeQpYApXeX+/BQUtJdNlsr2tJKCpISUvY78Z7iFrvIaf9ZFiSmr0ZOl7Evb4iKp76b1t
SEu/elwr4IoYu0SpaXkFl9dJxBmSkNU5ba1KLskR/5Ny/2kTXhITPPtKklqSSzy8MeGsmZEpiO/9RHPk9XSJf6glqPabUckr3x0p5IZobL+x8m2/Jfd38PfFwAABQNBmsAV8FflGZQqktzdXXi+bmju4Y83LgQ3aivcYTRj8rqHTAQIGKadvXxH5Kqocd4Ol/LvClO5UWFzxofLhnT4VfMAo3p4nj/dkWf+Czd5cKEfhpfjb0XqT0nLotMN5wOFkIQgRZ4IU2lb/4QLlsnzirvP9e4rnxKYLP1WWEepSyQ5Gv3VcK+Yk9ZHS/u3ggIJH0F34OAYq6tt/+B3DUtk6qWU0Cb66h6kJDitfSsPzYSOG1bVMdZrd+0M3hnVp14/EjZACf9adPsJJPUfCNvlFpQ3ZVuM8RaL0rtM8YWCT6wWc3tfY2Xppc2ideLdi3CcYCVF/9y/dAEt8k/afLLGaI+W7vZwm4KiuwvFRoK9YNBbT3VPQ7dmeUTqa4Jfj889K+Kvu2GhF3v1RbhbkgUMx39l7/2peEM/fcs5Ul3etcZ3bzyfdG5f2pVve4U8IiC1IXyjw1BL/NIP5uf0tzxAZavdPkv/djcf2d1cN/HZK24Lg0EYhmq5HaxWz1md9eFYzvZVuPJ6C6Cz7WhdoH5JPRtGvVyvC9PZH2kVuWBI99VUSuai+HrXJGFgu1PwbP/XO3ucrZrIlY/pK6BFlTjbYVPwgXa4cd3neUG6S3MS99e1r1q5iZUwh4VnW+MjAtoK97neBhUS7hNCPHlbteoK+HYnj8P2R3ulQS8E5LlYt6HfL9N24Ksi0Movg33Zx9937J/RaSRWDDd/cHGLrw3Sn6cO2uQtuSUEfZ+NCNVzhBf93b7N/UKJ476cKcFfkgEljgYzK2IEmqkII2h/nnzW9h4CBWuRe60E6h3LST2MKe5pd0vMPvfZV/GBPRd3jWN0QR7ErRaGEBn7rv7d3diOfUPTpf+1zXXx30vmU6aUucZc0fVluNQnd8JRq14bWJKfqquhpkbtTMUOhTflZS+03iQNHqP/YTdUDSOreCF+cM+sETfuvDvcaKghTvaReG9XP9Wb3y+TzYQF82Xc8E72YRfqC0Ytb0P3VdHYrzPj6f9OG/kuCSuv9ngqJiH0j8GH/6rZx4eTAY1m+7ylIDovwdGPT+/Y/75PVI/6Nvek88FW0PUscpDBNUPf5F+2Nryrr2wief3d33l4RfaYIzOZr/hfQn6Xe8OoXVc3eCUpgW/ZhLY7dkoxkOv1q2T7/3Rf95d3IfulfxEEXn7/ha58eWWPF2a/3z6/e1r29rhJ+WCM1N033qqO/drc7vdhZnL1avpPErql7a7q7o6eN40Xyx8S/9NX3/l9/2d4zTCHhAVy9NsNu+/URrI138v/eUTnZfRl39F934yIhv/9T/1vO/qiffSlJTfV42rwn4JPGaf/RH+ry9vrk/Xy9CzCyetZq/r3gkM9/baxEZd7u+YF7x05eT61XoR8hbfHafBGYr5n2HwVl3d3d3uWKVfKd7qdCeqXBFd8gtfkny+6cvL7qS+v/fJ6e+I/RUCzxXcvwhLG+dr2NljzXW5Y7CioSL+JH+CrDLrNHxXhE628gPsICnd3t7dJ5nonpJJ8rRRO71uTb8nron126yAitnH3YMgGtqThAjVB3MD1tve79pwmtcRfdxrsn/l8mX9NDSHPXfYnvV0JILyEE83u+kk5F6ULYiEySvy0StV/pJtIFIlKWHDsuu26VFa3l7acnr//qFCtO8xF8uW3nyCMzcFNtSIiBTueW5lpXssKsn41TJ9L/uWKXDS/ZMH+JcREdVyZfw98XAAABNtBmuAV8FfmGVJnXuLjL6+Yq9l8XOXxPuHdhXuYmXH+LLmwZtR2JFDPhEiRwYUizSD0Iu96MftII/y4XzleUNHcdCda/89uknteoRCPgcO/LCWfn+r8n23Z7uCORpGT3pV8Ze934CD3rfR8ab7xvyylLVFrL770a
7rl9/Ihc5d1q+Fn7hEc7uCR2rPuATetvj3hxbQDG++x8vt/gmKc30pL8k/OYNbhqR/rC3VdP7hLSU/vdk9c//oshamDut9fl3PmE/Ns49dl/vbGG4YFcCWwSQaxnXTMe/TtmVXINns4dl97acbRtg0j9L8WqWypwuhAd7bWuq+XBqxelo9QY/+L2tLuZFsUHh1Ov/whtLnW1wnHpe/HB7VAGPeebO6f074S6eLXTlYKijQu3a8OOCsZDzudcMIfm96Z0nnhrD9pSL8sy/H18vv+DA9ln/d2/1L6vG2Qi7P1lxsSBY6VZULJl74ZRb8KVgeb1D5C7OwGPEDIUeGT81O+Cfa5G9YRcq6/L7V1YfK98QhFoNNlkqXJXGv4WUjCb/BEMe3DSA6r83jq6Kwl5/vGeMnpK/mQu2QLlKp1FKF8i23EiT1mHqdJw/WpYjOuGW/rxW51/8TPX4agtlRCfW5V4JBC4bUjkH7p0iNwl4KS3L/J9t9zXuCeTD/e+t+5MD/qFRClIf3bTc8Z+pqeHxl9+Xx8fEjHS6/eCojJD3jR66n3tpj8cejA9hjpdvFLBMUMqF9yvIv5Ne+qJLNnHQ9K29jfQmWYLzL9jZStSy7SCmRZ+7ve7ornb7CJpJFo/w/boiAOpBL4PdnuCbe4NrvVv/2+sOCeaOF7r4RXJaEXHxeVqxpX8T7wQy9Gul7te4wScL30zvO3zBrh0i38v6fIINk3C+7qsEnjZ5fr7dlGe3N3QiWCfMRKxvezVdiYJfr4QtQbjWi2kZk+3uto19+mII97t79cqJCHgmJPsQsS/t7YJ73vfl6lE8u266sa1y737hIXLJ939eT6yfXt9NZEhTsn136u++zrosEW8uHT8Zd7vu7+79pnKxuEPFCrz4+mh+xnP585/0nd0XwmJTvyigaST9a1+v1BHy45BZf/l36vjBK5Prrdst3f8L7pOGEOQu37pn/9USF17XThHw8Wpmcka4XcW/F0WH5+svpP4IjKOq+35RJYQqNz2Jfbgu3u73SKT129clz497GsIy5GVt7vmtq7dr6PcsN9fuIJu7v6aFwJtZot7T9wls/Xuju6IsSdoJH+felak5CO7wj5t71t0JEZdt72f3vjC3vd5fzw3enr7yndLq25Xum+32++6/r8R272YqN/4s2qZh8DX0y/kvTG53CXhPL/hD/HSGGfu+XXe4ftr291OVArF3uSX3vOmnnNcufikWUq99CF19nu98ntagtiv2Wa8/dF/pTRxncwL7u5WL9bHk/hMvrfkFQJWvp7esLpJixdb3eFvXqyDj3e8buSK0ko4VbE90M/X1lu7+pL37wSXe6ZU0kMYLruXd0j/SLqymkOxnCvkMXVOz39Q6Tiy6G+lo5eR2SywBLUu/0+4KNm4YWsawamTdakO7on3gjpaad1fjvFad3mzk+q7UaIBVu73drcODwtGtSQReMqRMn7k/wx5Id2CD3y/v7rG7/XyRGQ2XMf74LIAAAFCkGbABXwWP3CIrYMdakAR5vaROly+rG1ieGJrDeWeJewZS0a6m314u8x3e/y8YJshfzGsUEezelXuM5jwIvxtKlk2+7AINeDf1BY9djvr1K9mPL5LhaX9/COd7vJ3DbCVu9APiob2VjPcoHZt298vtR8dFH1sntNO7Ti5NDcXRffb0HsTBfM37hLw6aHbaU+/VdBEuTy1T6NO/LBhve09OP5cal/3ot7v8tpRuNwp5hWaIfwfL/bthEgXFe7ARdfgVn6uMfT0RAQ80j+W8pf7psaXXEulWtlwHwTny042Fj+v8HCmq/pItodcDb1vXYC9lMnmfIu9JVjr8DvflFB+ram9Jfk9pJ7/J6pWv9HghxlP2n3TnQIylbzozXl4dzaheCfijNJt6LFkCSs8QHF1cv1PYUI6YhpjZFVj35QdvWqL6h3rs0m+y8n6N4uOT/QUw33h/cIFyFMz8BerDkZAAg+/m/6+Tyu0vjJcv/wgW0W1X+GW
AmA85zet8Vj12yPMv1Ze5Q425bhLu5t/xRnZeHpaSOkSe1YlF+CsxB56UA7VnpNAb3LeZsfL79Yst3DbZLrRJ4S8FhOViVftlhs3tUFLvZWavpAhXsJi9bZmz9Fvt+X22hLLBXv3bIS8oqCX5nhv1h+vLcfnE32Mgq36id4j34SjF4x6v9i2CznTIXRtUTy/MIQZXfmrDeig8N/IcdWT+uduJhhLs/Tr/3/uMLcbFaw9MpD6X+3v/yP2NhaCQXJv4ufDBQ9H+4/q3Fd3d721RUPuNO5B/1Lq5bLBEvnzm8npJa9hQ2Pml8wFjol/jE2PeYxhCv0PINKqYIaLo371HiEv+V8Jar3WCfIuxv9mrl5GpOPryIX3UIb0pnR+3CZvMozKPmSs34Ji7pZSJlkEt821PwRFe4VYzDvFml+1S2mJlhM+7vft9N8it1gsJQXO29AQ7jLU2w1YJtt3CX5Xb9v9H1CPguNnzmZOz4JLbnGt91rd+YYzkb+Hxec2+4WfXw6nzTp1s/r3BCKPFOk5795PP7af6xBdzk5L7WhHyNX9oJ/D6LoPd+vpGu7/lEn/CHgsHJz3vn//4kqekfva7mNCS5f/tHTrR4mfVgZ+58jM0qJJffmo0GT1rJS0WLeuJ3u+/dYPUI70kN9910rD5T4zTCBftdsFIhOma7nZ3vfeyFgltxU3kOOxk+v9xYk/n+8oL0yiiO+73vIt8K68s8Jf0f1/k2bvfThC73vc8e+iyd3rkc175f/x2793e/7E3OvCHhERCf/342g+nHafRHyfVdE/V9Zi3v0xZbvu7qxP3U6Hfr8STka93tsSlMQNu/YB35fTCIlzSvn73vXyQj4ey+iih4R04T/xnEYhFeUuHfv9CGPLCG43p3d3v1lE7v2lSh9U5V/qbeirtQgfd523b37evkC/Lm3bf6nPtcn0ltPQSNlh5f0h0BNr5L9xe8/3eEvD2GnDvZhdMbUQ2vf7IZ64cv7BYIc/8Ji9La2BtQN9CImT1zJTKwRiyf05tS1Ccz++/JLP99miBJaflpJ6qSbieT7Wt/Vki35JyP9OmbbvKkYUjJ6VXkZodJDTODDT+992p0lu3/7HmS7hPyDnAH1tyWeT6+P97vl99FzHe/bffu0xPvem9JRMkEe70qk9PIlLJe9wzT9fWp01ub6XkbKf/8L+GrSWnXh+/z/4jNRvJhXvyfX//ujRN/uMM/v4e+KgAAABQ1BmyAV8FfixnD+UWx0j+X/3L1el7mJd6euUup8C/mJw41SX/3CPHSyjwk/6LNC9gkbLVb0HcbD490zL0Xmj1zWhZMuNdRnx87xcf0Zfy/5+Cm7+feN41JDX4ztkDzSUQcWsgxgMm2F/d6TLPBb4I/06hxutKpfy1tl7WstECEr5ru7GMtC/wnvJjVYWL+9uCAcK7CrxIlyK3KFzKwZ0/yY3r61mKrMYb5mH994JvVwjqY4LlSFF9kG19houC3sAgXu/dLgR/s+//cf8haUdzh5qaTxFly+3p4S5Hz9g/5ef6cT9OvW1Chf73CmHP9bIarthr6nI+c2eGmf9QQvsbxvLzD2h6q9GRsn200d6jTPQ3N5dqqN2xSwSf4akrLcT+84KiYWwJW5u1iYXe//uMKkcldyLse7tLtYxJPGOBB+HeXzaR9BOY0ct2EW3i1kzIKyerV0d/RbLz3SefSoss3B2GivvUsEwg4XuUpD6XlASHXM4I7svv9AoKRBD3nB+4J9j372coS8EZJpXdv8K7uHZP7h2CjvHWHvv9K7gu+3ZYS3OOaV3D9LItY/bftoIzrDb9I6P+YYH3+Dh3ub+nPwQdw8RX8qwAtOuQ9PeHJQG789tDdflUcI0Wkqwep5/SbuJkP3whd8b/35fbTuILMGoZlc3O83afrUSrNcvDaW1YGd9eaOxH+4owZcXwBN7+R/M0+jzZwx5s5ATVZuViIekji2nhk/XzrMLw33+EfBcIcZnb9TkbgV3xyvLvorFa0f9
gaebyelXkTmz/3jSbsHJq43gig8F7+63jJf3QQfZro9DegE49E7t0juZn0+KQ0q+I/ck7PZQ0Ib8sDzwN1Fv1w/2X8nvVW5YKyjr8+9n4bbyVPwYGg20a2IodnDxhSWmTc9Vbyer+NuED3d1s52V9zJulJ6Slllmi8tvpP6UKYzXxK/85jfH9zlntCBPs5JF6daSD5tJM4K746uvadhVwN3S+1zQVlhC6QevS9d/cDnd3ecyevXqYpu+EfBMII1sj1y63PG47gou+dfy8swtnCHlP7tPvL4xd4eKE/vskaHp2l/wZ0PR28NKycy7NzYfafwSjrcowQahbrsL2lF57L+298vd79xJXc43hH/T/UJY+4Nno/8M73U3T/+CO+7eojWlexSnlqP2vlvfL/l4fvfz5e6/dXCS3wgS7vzcufSpiYSPCy5Ls8OkTwdpXu4Toymoe+9bWJGvs4fu8/xApouTH5Gv6PEGjgn9e8vrwmV7932Lsm5faZ0eWVN36xhX3u13e79IgSy/d7+hJz5Q4zT4Q9DG9wT7v3dz8glq9WP936j9G5+729mM45ffVXxvfo92BRgVvJ67+uT7/8Fm96T7vOmsnBIR73env7hEv8nclCPVlLu3ponKSf+CPd5hZf/o3js5o+qKhIh3xnFXRPr6TWElnCeJk8uXzpPib3d3d2qxurH+utwRDX1RSuTutoxnIP/WEvHlYbf4/jb/eU6gsM7Y/S45t9aFxsrcgxm+2xwu6bu7txO+UWidKImvuCc2r0/Kd75P61zw0TGt6+0e/ubmi3VO5u5q9QQ09xqD8EZJjcq6Tf44kAna7SgYQIeh8uUdvH6PYUf4yN1fjd32rR/fL8E5N07rb1QtF5eXLr67GwREl+UGvSe69if0JK++rVeZip/uHbJf5MLLbwmIu7yZ3q+tpKnrQIT7uRPMyFS3tVckVu7gSIhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gYhS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeBiFLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3gghS0tLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW
lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXeCCFLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4IIUtLS0tLS0tLS0tLS0tLS0tLwAADdCZYiCAV8mKAAOKMnJycnJycnJycnJycnJycnJycnJycnJycnJycnJ11111111111///wXeAAmFEcIGm63wChQOU1oQEeEj84Ba2lPP/u7vw6IgATyCOF2cOKFpqPMANIgQwqMkPMjTYR88zMDUXZnDIT/2wMBVFRFyQ8o8AmLIC7aUBdUP35c9jxADMgEdpYBJkHUp2iKlZXx94+K8ARMxEOpH4OpLDpP8aQdXIO5Ma+G8bVvsdADZ7iv8LXiuvHTinnkOOSBqz8zMW1xGr/eFB8gIenZ4C5xzag4f/q41UV6poBZeFcZqiuRdDHjGTxnGOzAUtE/G5RrdEamCJMVoVuaNj5YBeBUMdlkzL6n4J6JOpRpzx3/+qKSk74CvjuAV6HcQVTmGubuUZ/6mc1NVdYHpiT8oycn5Q4VL+vkx059tzV9dbuJuoL8F4sWgiDljbTNEnc/O1Vo5ZuG7DhMLZqSekbVNZRCesGjSHjpt2av/VfVWHShCJT6W7sJpWbFyK5XyVEIJNEBu8UFh2PBz8dUdE//CW7gt/rrrrrrrrrrrrrrrrrrrrr//ycnhAZwAIZJMa9CI7pwAEwBGoBU/XeoAY4AmIXwADqQIUT+vhLxqtXTgZsAzKLikgD3Q9pN3Bbh02FpyhbZRrGreUxjALt4KbEEAiIfgTGbDQU+GvbADbWbIgko8BIfdfJ3c9y3YicADsgIKL3PR4gJqVk4ELdrEyCiTdPSZzjT/vTzDHw0mP2gCjgVAVgHiDIO6tBZMg4R7nmZsofTAMYE/9OA7MDFLquduiDdNt0REgRpiGNQB0y/zIB/59OImSEtPGHr0rqG59X7+1z9yfQ/l4MPmiivtxCMZypsVLCj46Og3AoGAATwOokmpYUdr1mlHfgCSO6I+bdsc2+1H3bNsPoNLRhUZJciNTXMNTAGvV03xtdnO324BZ5/b7tUrNgF2YiC3j8g/D/WRdIWi+0D6yJFaQa4dl+KoUnp+Hjf/bRrZKDgOx88pXe/QSvH3bIHTk4nkBJW/q23PArASAAUgcDXr/jGkv8uyNxi6+BA2l7s4nGzgokTK03J+aULFZoz7QGHZyon1Rub5723yDfCeQ7UR7UEOt7H5LerG2Zd+E7pTxNTV1mzfUc8VIbxQz3zBW3ghoQrWNsX6l+HuCtcG66xcXu4i7iKrhiXeUYz5NBpvihuKrC4crxvhE/nJh6CI76KY/dKpyOgOBo35EA9riQR/jEOPVFRm9pj/arfk3q7cbY+AXOjmcOSiXkEXuhbYuYq1BwmQr1lUz4e0O8p/uJNBEnZyJ49z4lUXl2OrFZaiNOmuiogCxpc/M5IZohALHsixY5nBUjPgMeq98hP+9h6bFNY6m38vEV1D2ezJGW37iNb+xX0Cw2AZbszCKEiDz9k8wF/SvXeoRUJPQSUsBbJ/M2EctiQCoThulKkyOyQS5Gm9U6g+G6L3yhPN
SnNuZjoTY3XQ2Qh4lus91duGb4eM4zmLqV30xWIi61w9c0/jznlx0f4KpPiyUMXPo61A6n1LUG/5j0DtTi3isJCyygaQHAzk4xmsa5TErfpXvqqzqZq1lVORv1pdMj3RiHwyHzZUGsBMkPknX4tbLTIOeh4+knsHsDTZmDQHGEsaHIwrhbjRI63//mfnYLYOrJpakG5lgS0gBeVFd8vqJrrrrrrrrrrrrrrrrrr//yIsIDuAA4UcAX0ggxmBqACY1OG4ADsIazftaQoEMDUStHwALiCKCxNcO6qf+7PNtj+KXW0AMBg3xsN7XFvdv8FGQ/gppwQTikBAAhjBcBmM1AT1NgkqHbFUAgsfZgQA3qZ5EY/pALSm+RE272AAFWyAAIqYDeBHR8WWkzmzYNb7BIYiv+028+oLIzfsjUIBctGX7//8Wj4gGEg4/JYOP7BgmYRXp0Fpoj0T5JWkefoEPq0Y+/WLOQF2b8Lz/NhSzNzIjL393AVgdr1QiJ+9+JXLyKfgMTARimyMhFBGoEkGABEJhIABhxid77BG1YPkM+0FxIDHKAll6nukFrTdG3KDKX0TwC5yOj1nPs5RKIMws5F4Y6yBP+3q+KWyK9b102HjAk8Ul2w0lPOEPNb/Ogz9RMN5ePqWzG2XqIoBqSHe6Toq58df1wUPXF7jUJTU9Cf6mWzz3ie9v6MUogABARqx4Bn5pO6m2B3OfYuhr3b2EAbECEo3vOCqdarPkfyhwAnWfZc8YybePzm0uPXNRV7iF4OWVRVfPWyYycl8HPeUaUv+4mnfyvlfO9ztTdhAVB5sIlUL3WAhkIbST/D/iSImr7RlKdPrgKvZmwUMgR1pPdJTDi730nP+YBQ3YqUyscELYkVrnG/fH9UCORgsngLto8EmF0bIvCxIofzyl86MzmrELoAGrkvBjBq+ZeFI9cZlGCe03ut09sU7KW/qxHnKcHujV1v4NOTX1nrtNV7vo7X++8dsKd/wGamTPCd/5zbOn06nrIg4VOvpkyj7FF7/f1MABV4FjhR516wl0MGDNw0WrWu06A8I27Yxu0MooE6hcol600dAa7RJM3bWVVEwdFtHGzwRZJgPd3eqiPdALeYaM64hl/277Z5o6EkrOX5jmIbpM05oh1UPSmr58iPLcFQ3D+AbLKhT2gdCdI21eBiGaIEfhMnIP6rseOSY/TafXcXVmgTirqRrx/mGpCUoEDH+701tEDrz4bz65mJvvhS2rGUb3Dir9/sLstPPkngjrpGyPz4Mcp7nX5uFmnmL0MABoInMlZT88CXeXhMugJWYMJIS+/2PB3jWaYIpUde9zCDkRxd3d8yuyOZj2ZPZVM2g6Ce7enV5gvYO4AcT2kVWJ970+4X/tu9ITZD2sF/u9ao22MjXjZRSsGe4uJDfDkHA6J6id7y1Vagx8lcBlqKGj4ehObUfUr0Kswk8e5oIqiCZuVWDrtdAFbIjPWAp9XQqzdFCKHJGbiHeZ8sQeTSuVylVqyhmthde/7euSYtW3V5kZGCvnHAhO0GwvPwScQVJvbBqwCBiiuxZvEHwOWTW8p7F0VLmpzZSWgvrmJ2gydnb5TOXMwkpt2gWUIv9sACbV2/cj+XCTQ/91UKwX2yXV1JjO9J2/N5h8lBg5ZW/R2+Plyy9Z12UTmkBV2dEWbXwwnxobileWik0utGMwYpP9XVo05voiEct0mVN6DVfUl7ENHWVwRMIcBciT45y58/qac7H88NQEQ3W/KbBC/mK+hOn7o4YQmRBU3g1Ix472ouuuuuuuuuuuuuuuuv//s2HBnAAzb3ym59QEBw2W1nMmh6E7D8yAA7kJS/Nv/f+CQhycoB4JD7pBrH8EATnMEACI5/AJ8hHBftuADsUzjGIKO/WJmZrnYHcByAAdEm4pfkDtMtrHCQKEiH4cTJtFT94HVCHVKukWJx18MIj0WAAf5T4rfGH/2+eCGbTf7djk3PIkwN/396/2uKWICAGvWNgAgGj6lfTVBm5
8LMbbaJ/Al+trjH92Cao9p53NGBTkpG7AGKZUygnCX/uhABCGPCAAQChag0mOqrZolSo/9oE3P497gUrMRmMnbN8cw6LkRixVfFatMC2dLtuSS6pZgB4ZBfvPa5NhpxlXdIrY3ttb+etHCKoJybLKJWQk7qAprokWcbgZOjENqynt7AKowXfzow4xMXX/SsuNY0rnqXY+bJw9P+md1BL/n21NS/wfyO5A/v9SYJikB/dIp09c+NI1iBf/jdDGBouXHxYA6TgQ7f4EoYOEjMme37/O+eZgZY9l0E1XvezjmSZEFQ/sFFYTKbFNoqY5eWTmQS89FZC/6S5gz/9ZkH71dy2t7/YcVMbO4aSHu86/jWrN/g/prPrpFTeBDF//D+CRgy/6tOsgi1qsWv9xomG3FVyaMPd//+LZ7QCYLOLlLCpBlqHmjffsyynAcovpvMZdTafAsIRLY2k0/zaBF7ygZkAoKw6czu0gqmCU+xZTqifcFBksMVI1flqHfj5Gfc+syQlJsisHf+6m2eEIhf/fJRF+Xgvb2a9Be59HbTvq8ileOl6YfHBYD8X2kptQJpiv/dJjj7gc25Y8qq5erpY8Xv6S+pmc79A2QuQvwvKb31rmC8l8fZxZhe0uH/wNZoUMqpCVWaxbEw/I+e45wk4R6d0zWsBYEEkMZ3nt5lLwJ1PQbumRnX32i2/XsLRlwnHmnMJW6/VlVtGEf7dFGI9Q7maEkkNC4TCeyEhr21fk7wz/eq7smfUJR+TpkXwjJ+W/tYUwwgKAFeA4x1zSuawtoNqY/o//nMc7isRI+miMfiWco5hYnzlvWsuABhFXmQbNQw/Ceawh7g70J7XUbLQi6v9gJ6My9YKREEs7f0Nbp6eIc9DyEYjPuLvxU19uLz5f7okye/7bTl22Xc+N/gXzz6BcmAI0hqsEzK/jEnD1lwWcE1Jxxsee/xKHjBxQbg1jWKgRpu2H6qDhN3e2wOPj+n5WdGukImWamk7ol+TmnNSyT4ewETvX3aDOii/PrnnGu1I6thcPWUvYsXmZyq7M2/Jv3FcBkaiZCxgMKrDBS+tfyqAIXCPmy7uxANZ/9YpR/QNZcVD2ks/UTZN9vOWsbzyRdICN5ZWX8PAkFBx8vNfbq+TuKmaD6V7W/J0u4B+h5s5q7NIPSEa9XfQ3gEJz4TuxSt3PwKlhlI+4uhn3WIpKMZSbPvBiF0BgGUq/KudQT4KXZlctAcf8j0AmU+ZJrvU94uJt9X08rF3954awG/Zh5MZnTd21knA1HkQ33kTqqX8crBoDz9SFJ5aaMPko+GzYA7HZMhM/Tw/Lo735yruzkeArIsUOCLGyiIwEEFErhPS7sS+LVjICI5sW7QcfO0YPscFs31RDoBICKyB5QZXa5kFl/E1Kbf7rwfql2tZxpQc/k2hI0BvX4e/Efe7X0fG2BBK44g1bQCuN/suRrWFjfn6PA8gmPUdn4vUB0QBBdEFzzaXxTqNFU0zKBZQNutheuqV/6U+wVDUq5qxt9T2uuuuuuuuuuuuuuuv9rbW/HcABmwTbnDrMFYXdWAtGd0qAbJs9n/hrbiOKgP+hfv5ThXkTCprlABnJSIBnUP398AERelNpzihMb1CgDrrJE/5cADgi6B6YWWNy0LCCyzwIR0IGQg6CRzYlBLb2QTFTA/bJgkoR42jsoI0vfgH7N8gC9jtP22zAcJM4H5D/FyOvNsCMxZDRZn8/P8bU9k/1RWOn5SW//NEYAvEqB8G3ikyxX3Hr5/JKxetpB1A8JLDakd3x7qjppLPPgYEWQICxR6nRt7+VvORx33nV0VCUzuJOZe3N8+PgNCZGPca+mQmnudXq7Sr+xi9dtGuSEyu8D/vDrQot7+q2Y/b0AANfeqpLjGojvvwYhOq44P/+wGgpZWOO04AaiDVgJqwhZEyVwElsGdF/9Qov4BvmPN553w54Rybz1UUonIVZsn0biHwuwVm+dan6fC3fsmPiyOMkwgyT/2v1uT2T/Ld
m0iC8u+2NGD3hghFhNPaXDVKOrmc2AjYtqcXeYr3kcegBC1OWJfGa3Ro8zDKY0GIrAl3D4p8io05NzY+qrTzYqAAem9/GbMApgScRgw4IA7qEPoFXyCb5RIfBtwyCokj7voois+MDODHwgfPHE0xKWf1m18UhfvonNUBQnJwN9xnRkC/6Tz34h1pnIxw5paw4sfcMuEHqf+3qt2DJqzfEDrDWoNL7aRadHgoR/c6ID+MvSD/X/gUqkgFCU6+0kBFKcOsyXGJW8Y/TlnKPMFB9SyCgFc28vlJAyG6zIQ6phUImNYmEwHZtoMT9dsOWYOMgSpbeB4aBQZP5T4M2Prfo7beASf7K/udV70pfYqObNyoH9ZVfSuaOUxVwVzUn4bNoLqabC/dalHs4ON5/ujWLn9zYev/7amH1SUzf4H6Cn2gA7WP8pkeuriExQRqS9lpYQ5z/VFVpFf//z0ZbAndw7D+QPLaE0OmxSDyocvIpq3cj9w4EQ45y7tzuec5543eJaw2fLrSSSTeGJQBDQHtm35eX7wMnbSoBffoFg7ZgE3tMmUBQCvf129ZiB1BDe2DWQ5GgH+97IpTEodriw4hSa9XtDYU61L25gZSd/Wu/6iS8BJ8HlJzvXsL8Xsl3YCztGZPXgiAe4gFWAdx9yI7xlD6WZ3X2lyA13f7we358HdxYxNjIaqayfy0SBCkUkkbsjb/e/hlKLmMQuCjG073YeyOeq9avq9L8UwZ98j3/+eUi3rK9aQIOKGxH29o84rlC6WZX53oG8MiFovWx5Jxuz+wbDoNguxhMtg3MPzDjwChCXo3kYiwTzzXA+FCt7PTr9+EB8nNiphxRohJUf43i3J3Wrnj7ckaKc8DXCLVfXJv+K1UUMMLyf2mq41PfIAHzu3iuAVeO9MBpFOQkfbSy4Fh62dRL4D0F5uqrGTQ7/UEMYC4uZu5Pr0kBM/WQQXJtYz+ArqF/HpYf2/k8O28JTKuvxOL1Jlti2ogmeuXNf9UW8YwVGTj7D0soRdyr94obEGWTtWET/Se9/79EfbEfkXf/+/mBnFylEJELc0tD8fQpOJQYiffhodiFRH1PexdKRjsDc6ZqTGw8Ww36TbMT9BbsSURHuiqhJsZdb0Wokp/9+hAu1x/wUlhWn3QoFvNz+0mH4hixxG/fa0nBJDBmZqMrBymKUIv3pRDqDJtJ7a5dDQdLkI8xTPZZOl03hxDGWhF5j7BtEwWORB562GzMhRMYBx6l/1lkG2oBUac/+60pHD1JGedTjQbOBZrQOLggAY5AgzFm5M6owjareZO/QDIDXMJyQ//WV/CT3VI4hz00aN3oxA1CNgt43zTLjgNzOEdVnAMmh6GTTdYyVCp32qO1SfFcZMsfpxqqv/33UH5yuZkzQ0w/5qN32dixcuBBdhAnggQUpXlPY7es28a/uzlFio4+5R7ogHpBAT48XwDZw2tMK1wpoueQVSNmwzCs/CQWOBjmwkgz/DfynHJ4Jia6666666666666///hBdgke7QAebACRO4BwkWXSJyl0AcJSfAjKzz3AHaGuJnrWws1oW4QAk0GyqSEorW8wIv/t+8kVmICMBFDCtRfGBBCCAut/K0dCMzEkKouHidQS1zJeSgEl5AhIN5SAR7EBBM/zNxhneH5mSfAqDYAFaeHHYWQfDUMBGKhkAYLggPq+d9dI6xLQSI1z4CiB57sLEgCJY1iRQsRBCx9h84cMXcLYQNUoSDkw5rSe8kArC2e3P6rSIT48GvvTTXNsgzCypRdXWluzODwrX+sh25bHMJv9p/+XZYJJQqe6bj+9L6w2TC0qtS1LwAmYH+ryHzglzS/bM7lb0lZHEkDqoh9iUw8UemQFvqakeOL9+r+WmsJx6zrufAmJjoR+Jb0Z2YAfUSCLl5f8IvF5sz6N/gsUiAXyGxJRCtJkV//z+UpnjN7Ha2rTIygzH5F7/049Wuv8JDrW0SytbZhuXCKZn9V1P6zAgAIZRAQJ+4Z3bN
sz+Nz9P2K/LdnaNSpgO4ebOZ2aT60QhJIUvT0VW4wux4EXVSe3gl/svCYmDniXYDjv21BzFGnNQz7/1mE9FE9aRsev+Wrx6Be3JHRl4AnQmQzwVaWnqHV1q9DN+y9tlwvGb33QFzYVGnyfTB37nIF8qBaO8k8z/tuP2wAnJwKJ/bLBJfUBSymw3feKFjR3C6NorFyAQAVIcIBjkfrfWWOVU1kYGYIO+NI11L2osuG4QEdzQjwCavQXTz7dm+kAit5ialNScceH0mFFQWE6q7AM9jUpobf/z+8evFxL3tsazQCiDXQ275JlDfyIIScis3YsSMzyn/v/000Aw/+c5Dy0LosCIY6AOSFttGP9HvuzfSLLyXsEe3I72qRwYUZBiaY2ckYZ7wse2bLgWPo9EyUxIFmsRs4XOCs2Tw0Lmbk79PVr801vwgHqMBZoMjP9Xn3Fzpu6mFS9UUrSZ0fA3RdAM+MY7gHz7GbFjd7/vUqWTZ45wQpYf3u3hEtJfXNTcRf3XIIyUUnQ0BAfJFTElnRHVLTBbAHWOMA/WQoOtqsg6+sfkYACvf9aUh75uGYfXx9627yGSVTrh+koM6YVIQv9aeFL5y053M3VvAplVUq1Raaw3OiK217Zs3AUEgj33ccLzSkEmFJakkEWa2CQMEDQoRwQdHsiAh/CuOdjB9x/5DhkgHrY5ubGP55PGM2YsmDr+FreSjIDSJqhc5NWJrAnkT/PDM0qYMD0i5flwKFj8mEuk1YQroGWvf/ZuI5JlIGm9t8WwJi0VN3/vw3wvhb5v6Qz/KL3SFoKiOfhnv+50+KaAbv4MkoM5VOhyRtTKVcCcaCeNXdGp/4F7Mo1J3RUNYgquA8okStWJ9RnLKN5G65ahtZSNtQpYwIpkTRG6ELvHaqdoqnDdwVE0oFPABsHdkaJ1cqf/twWMNe8fHCBzL3yvOwYtDQYVpZsBWJLkKKuPG2FxdInDswxgA5lCUJTSzez+UrESToAQ43cCOEbKBb/Cp0swqvpiAwri1/1yhOuIL6RyPtZBLmjpiYeP74FvAZpqb275yd61rI/Wgbj8yZPDjug3egqAy85mEUtLFXfaB2GDWgIfesj7vebyGHXG6Qh5tYxz/MpImUBNiYkza21jeWKNsRay4L5y5VMq3VpnBlwve/2O0iefvbeu3Ri0ZHLas1vo/n6u2YQS326fGtrI3cjb4u0JvMqbBPtyMMzHUJ/Ob+abd/CD1zzMoPQ7SLJAL+0AFcB88AYvtM3voweRf/vBgoyCv/su7jAQ13ASOf6+4CTj+t1WmynQJkYExJanbBNzas82kvU52sQ+0khDUZont5b6QqBlZfe+9exkFTEjYyS1IF9V33no0AyS/bpgv8pyHjGF//jcgA/k43UR0KlMn7RfU0giTEEjRTKtMnuXt6O1Gx3rnXAQXNZvbfiY69oAiWb4eAgvDE3sqoQ8hCVTYF2VrxhnhkaWnWLRuK70rNWLN4yMyXUNCANGn8lDh/LAxHnDzvOeOY///o/v11111111111111///sODuOacGOHPOhsi3+gXAHZYGDCelSYxv+hDYAQYDa9kS+/QcFaYKE5ygxq8FYMwQkBC0djTQ0MoVNNBNBB6CgNgI7v+zfvfYBNjAcP0cCfMnIgMPHabaFU8qUIPEyhT+XsBN4b8ZKZH3moItJOSuGoeOZab2YThIOESHYcj9xkCP1Ksd7X5jIWwAuwuJ1qJxZDfyy9Ti39nvU4b8K7IBp9marp3kSfC3RArAzZY9uu+e/9qBnydajlLwlP5p8cRjJFwGLA8R9rqBdO7/gyopokKJu4rv+fo2BT3amCAdZymjUhQtCiin/F31lcwhrh7wXVzpP963voBhG7mWNPHi6yZcwBjP+2vn4H4RY7/cModGjW/safgmIaGECKDQKgMw+IEmaQ//4trhRZTUkJ0zQLwZ6vuonxZpHIKj4mHelpX5sn0gszM4dl6ziHryC7kwpfX+bNRIAtX1mmjE
qHYxSBhl9T9rpv+gFHnpYiKuLKM8AKIiUorsTPAOAAZ4YDBgV36x1Rv2OMk+gsfBFgsGEnkKaio0lKmQIJ5YeP0CejpiJqjmUnq5Y7cBZTGFl1hm/3TsgyX0I1ru79+eqe6f+1awvR/WlJGf4tU28qmNhxsUqY1JJ4AnleHfiv/CHUBCFmdR3jxM3iXz+0Xjn8ICYlCEMuNocoIF/HhcbB1K00lv+yKJi6EOAsGVF95asDpy8beE1ivs6rOciDv+qEfgE1O53F/gYldbQcacJtA3W22/DkGuxRqwoQwBoqlet06m7KMryl1kZNHgpnBOGob2Wvo/q6CiO0FomW1xBtcrwqmBi8o//3ttPAac6OU9keuoWt3wJoliaq89uftXBjz/yG9vuef3/kS7/lzthSprmnnSdN3uvOZiY47s/glnAahkiql39vujMW/U1dreNiNaixeDqZwOJM/mXsF/BH3pKkSraU01Hpa6jlmNXDT/r2y8sZ9/nKXDnl4fPO2Vbfw2uv7ZZmhS8ytyA5MrZdkqTuzqY9dcRHVu6p++F7nKijcD5fGOpq8TPziLvwMgqGME6NRdJC+XRVOrrNs10mTuuRYqIEboTMRIKeKSOQC/aeFqACxtPht5i0S/W+dAk/3MnXTZq6adcFzLKavQ0t6xRY69AkGu4G5lsnUY9qb+to9NAv59hlVkCizAdPro6tw9p7cf8v82cjdkRwFBVJOu9p8dqiMln6yBjBmMZ5ATCp+/95HaEioGLlmEdvjLBAHduk4MMGOxJ1T9VNw14Gt0tCv1rVNfdgmg675u8NRsvTZB8qy2Kb30k9I5pk9eGE3+62wnlMoq2Rv2LaIKM2QeD2ESNnEkZXv/xMZrAxWkp5+C/DdZTn5jKArH1cEzW6C0yZHk//1jK2lIV4jqsuc6dub7Mnor4uK+9PVtevA6AYoEARhNi3D4/A/5ywdkPYTjJVEu5zjdvAiijwagd5fQVtuyydQlTAJXIABARC3LOPISZZy6Vom7qjK+rB7+x2oHECQZIDkl8Wsscj1lTguNsNmh0gm16UTLxpwIAu8H0+H/ceigD4799vfhx+m79E5tzlRzyoLSBxC2kP4MpV3IWrQf//2GxY09khfe9oCOb/mPxmkZJ/ZDj9zUtddddddddddddf//7DgQh5QBnfhkR6LTxY8gLcpQ7HoeC4jO4l8eEVLkr0t5r5f9GZyLodSkRvrI+IOqKdH1Rw1MbLBi9Xng8tHVWKTXn+zzlAHyl2UCWJAZdP90ifil/bREJDCg3coUVOUUqb3m0kgA6jrVq43lNh63zMq+DPfEIUgl61AaKAhEsrN1T6q6ihgilBhygQtf22iBedgzedevsXot0i3lLOqk8tLVlexthZurnhYJnyX3HmQMLQqpiH3bNx1ZkZjAXMVWYKsBfkeSYT3F9ro9NMg9S45ZnsHcYdGxFfG+qPCr/rNY90I6uHtyB8vaeFJpobcrU54kSMxbjt1pWmcVDd3cYzNbETS7w6YmYW6OPRSf/+SpZjgvNcjiMqct+I0VMNe3hQNjC+6R7JD1xm88iBjAMLs8Mg2Dk5e5DPRN5fo8KYEsJQorJ8lpkKrjtll1AoOgzmakqjGd3mHBwUYeNAGl8nZAdLyf23m5b8AgE3Ik8EfUlgAf480IJv9MW48lNcgjfdVFBKBqrHw3fRX1msAwub6QzpkKgGVtHWMa3ybTqn6NphixbadRJSjrZEJIyD4pKPjtbv6gishI7vRA5mNrGtGP5b7ZvSO6yPrD1EbnaL01PAaT2pduITuW9mFllMvlnP9DxMQtr1ar9rU2ZzSQnvBXHtOwn65YcyQTJ7h2NmzeimSXIiZ9nYVd+WPZwfS2ihFs0IibRrzcOl0QOU+qK8B0lRYCUbPGPn3bUbmn/K9c4GHn5W3calEgAMK0xIJnCGFjRgDYtqIbaJ/XAwWQjuT3PO+8+zQNxhlnOalX6xMSm5uBkJivMdzu/pKofhHRjvJi
T/rfoHO9+ZeqWxoHg4P9by19642jlZN/NzSf/K+xs9nyac0g0aQ1DFkZWk5mtXGRpih4IjgaFcAMPXT3xpEs1ap6s8N45IwEUKbg9wo2jrLuQpIaHD6f/3x2E60H8D0Ie3aHy4JZARYYEAdowx5+oGtooz7iIbI7Lmb5QJlkNOBPJNshIt+/tofjRu/mSw9eJYYRphQ9vjUKFBCUw+Q9UxSkJm2Rvf2sG9MmJHC+Mxccf0OIbwTuNQKOuAMg0PZ1ttTRet232HMzxsbVHNBqTRBSs6ySxMr1ZvE2Yz4GVWL7fLvH2ltO0e0oQOzsaS7BuuXSxy2G3n0VU85qvgVS1WU0+MbDeoNcijv/bgUIBAOMhuW42Bu0gm2fPYXYgbRgIKICFCaNtG/Hlp3NNLd6vDpSxmlg/TWRJxB3tRtOV3ZFPfJkdDstIf6oXAJK0n0s+iv//9hst72gSIv55zGe982Qvheomuuuuuuuuuuuuv//6MODITnApwRjTa/56CmwKQDkgMSmdP75AcAX+xwAUFKxcKDtpRuEBQRcVVe/qCEgIUILish4KqCq0AIPQD3ZpoFcTADz39NOlHIqwXFUVTcWQIPbataZLlqIlv2dVZ5g2AkcRY1ybZM7HgN4hbgJLqMo04PSWXj+AowBABuvF6UYz4XrLtv1rpohIbBlqTsVtYXlZ5VIA+OB/XP/2awT6FGV9Y3HiGaBT03STFGZRBElJ54iNKpH79a0RJuYITiCAjng/VK8T1h+123sq92ugxCwTGU7WknX6dnkCS2lPnMRpA5xwN6bxJffeAaRBqc08LOC7RfWhPi12WJyEDMBu60XvPity7051Qd1OTqH0yWNpD9noqjJ2sX9xPA+3DqxYqERpShZb23DyDPRroYO6D5C1j2Q+/LyopM2wg4QFFfI6Axhk0GrX2oEPumXqN84zdbeVxryaiS831vOTh88NBjAyY9oI8lelf3P0EVYRKpuX8NmkBJNk+X0av2BGbPEnkH7xdIo8ZCt5KNvUAwCeBbxr96+tPDq3FfwKdaYqvb4DLELCdKbkr7TXBXMwCboiu5MDE1wWLsfBU9H2txC/vi9zc6tDx2vtLg8t/ne48lFNCbROMFPxEi2/Z21hKtUMNtpc/xK4jRCzHWD+oAQ2mnOJI07KM8T/2dHBXgbpRx4Ft4mAibscVhy557WhSaM26Ehv2CmmbAUwvF9aWfQdU0nhvjLs1J1Py8xv91862YsxXlw33a9RNhkGMgMQEoFiReiA7Uaneu8hueNSn7kaPcCQbluurkQkinBf+G7+SQGdMsVY8GW0lJ5sw6m4MA+eAW4+X/uzfd72SXWYHLKzCP7S4xKmJYDrK/DAwthEFtW9gp2AoSJ+GL2cZzRQrfaaOiolUcLiEJMYrybduyWCbeZZeCZWgrJ74Rw9ljBDzkqOJ/pmvzKJF13Y0hmB27DeJXobwDuyaWAMyjjCmcc7TZltbBIKBYBGfgrVBQfYzKqMFXW8bpwrghIyG6somz9PmUg/YAVb2S8H+X6wWeAlqfh2K3D5pZ84P97K9x/k5vp9sMWXn0gN7pjDBvEnFvOg7P/WVEbEgS1GFfqml7n7TgxHDbcNiWoAh25n05tdgkFna9yO61sl2FYQ5pwOiN34mW+wivPBWidTLkQy8dVY/sgPAbkc2+W2NSHpp8tZybDUewEKBZTqmTm3Id/RtX9LOwsoMwC4hJiJ+B1dj9t5n/v1v8ui9PhV0Wdu8nVGTcDug34YVs5lHs2DVR+aNYwk3Htl9/lqnehuV1MJaAI59eGoW4e/7otgwYQIBB9vlIRWE2jspNvzk3aQyLm2KCcQykrqRVc6e1PP5vIlsxTxVXdRT7f2t9hsSR1EHey8ZKTPpTyV9zKruZIRi5qeuuuuuuuuuuuuv+n/oFnCU0ZvPgLAJiXzj383oPeiJ0RKfcdwEwMuLWHM9Tg6CxEOSAa1pwAAgAiK1bEACWn5ha/8aDQdTbTMVwfdOHn
jcrj+qAWPACuYHgwr3gPGv2eUcuFwe6UtJlua7mJAi1TUR7Qcs9fiUHU8A4JPLCADgFkIAHg0nFiw4e24MB2YAX2YVQqkE9Gz1xWEoUVwATVmboGWQKJoS4XgfMP+vb5M89pe6Zuexd/cFCEMz/3vUTR5OlCPXPTXyL63uITYdTV8jodR4iGvV36zvbnCRMV2ChGX+HiqCE+zwe1dAcUgujtqDd02Y31mzHhb+r1tHJaM0dye7GfLkZ63h/BPUz0LK/38/afS9NzIYeXrLt/URAwAICPXySiZ/+R9Ab9kWfpOHsJiQ7Qy2HFX2/WCiqgNhwiyg527voyUFYBhMU4Vec8aZoufhcSVxYyoguVb+xtd43yk/PG+AqkN1bRDBj2swczzfkTtNuzdg+59v4arBdN2aeruZtnMSCGJF+Aqe+h3F1Id9AhKAgieN+kd/PPM1iuF2Q5gq+AZwBwCFk8TROmKCESP6KIw7ZGPYOY3Xm8N4WIED55FBqNFk57gxoHLiSFXXh505wu8AIn9pIApvMDYDVed2gcVzAGPKR4xeH+GZg3Q3lBkN6GJTgujCdiv/njqTLZ75zU3dvjt9xIHNu5OeMisYe/DYQWeivUjdLxVnMmWArIIRzq86NJAOwKmcwHhIrmJg621NsWdYMbjralLrnuZKmUAPKmC3XKELMw6SMMvTuYNwuvPcUSk7mbgpOJmTCwLbTwK9YUlA8kDY0P6c0dmXrxMkcTXH+iEknuJ0c+tu9Elkeve2lJ+2JjCwfMsnf9DMS2KlPDKpHknv4Xyzv0BPoeAy+X8MeRuM1ZhhtHeLAoq4K8IE5O4wY5tyPsu7UuCXoBBL5q8A3geAxdvE5TZB4VaPZhvzrVav2hrPvDiPAp39CnVnrUyV5+wuoGGS9iqHsN99bc97hDCyTi8borq1NoX3HWO2ERV0eGzzUJwl+e4ZHwn4nnMaunYwoN0UXDkmje+KZXqP+wtLfW3r6Evoq61jU9C0te2Aihxl37PntrlcD/oAr0A7vBpSVTsbyMiCplOt/vPSJEHuc8oJMT76uvjBwCkwUAewBl8oPBKaIOpyqBLyb/JOlYsFRroZOLG/EgyX448hNr5byqtv9r/+9NqjD/DeA3Vu7/aL++NNhqMhKuHBGyga694273EzPSePjfdYEhtA5bjT4H2/JnAWsVmFgk00ueR3rllJ1vBXH9yWsW6150zXGiqhiG8cmHMmEkjs2tvOsYU/MczS5Lkle5IuuuuuuuuuFsBEraQdX8Mj75Y47yX8I8bTZjbHU6Z4ESBXAq4sFHvxoIAwJZLGeAdljxoBlwwtZmMy/Qn8XXXXXmlPnpTHBDwM4AMTr3UBqXekr/0BZirxGKMm8b3xHDUt3+aCLKyrPDwMEIWzJm4JZqP/wBgLGEeTDWo2VQvpFPvaV7Aox2BACYMaAx7E7D9vTeL28BEvG1YKM2kNihqPLYex4Bd9AjxxjJolE/9zDqZusnvl8+w1N5sCWiCkw+ZaXXcuf6JAiFkTPTKH24y2AkGcgxHcaynSxW6j/8kKaVX47+tbHz5gAgb/+yUUAqIpAoPv/pCeyO/uebTo/jALM8rab/JnxR/XQgg4f8FrKXjnYBcPVQlhAPqkiN8it4mCihzRyT9DjEqWN8Yd5j3+6Zvo/AYe9sBY0Y83Wfr4KKYju7uS2fy/0hA2KE7ev6X09FuAursa3QFs+qCOuTqZe1bxjuErG7cdD5e+ro/vR+dg7nMisAgIDyHLxpT4MG7P0ccLzIEqqWdFxoPP4bNAqPMctSx5RtoXYP7NYuZQU0lbZbc1/R0+xD+0dbrq7gwqYh1D4E8Cd9E3+RhhSRjo3+l30nva7ughONrDx454XaNdM4bcY5t+zzBRYBOYICwXCHZa0tnY1ExFynsc/rceC/7EvE+fcsNkI0tX7rngEw/DTsvmjJtEc4ugUVZB/0eWlafsuoC5koq/aVDjVe0o9yd2O39257xflp/zx0GenOxaO/
Tv/iQmAqI1y8nrowDc2bmckI3i90umbXs1Qadb/XA74LNfpm2KEKqzre4uPWP2XyXFN0sX08uifnZ2C2OldKI030uu5R9PQw+eOuhOll4dDjUf4aYx37XB8iImJ3Z5Jv6msjAkgQA1eglOzfNXX8fMy//XghWtgE9OnuAbDbUHmC6K/N5OBfviDibWMifKO9so7mB+BVxxJEj1PNVVRw1SULqoMLXafsbnzy8C+2Me/jBoZbH/fW20wl6qzfy3DawWYYyFTDF7WdaD4t7+qY+uuuuuuuuuPUMZaI7//rrrrr/NJ/6BYKPTwAiYoKZ0KB9KYigT/5A5QzRDMUiLDWCBIUgQBOY8YuI8MH5fhXtBjSSdy/0+aKmiOQhhK7MGygzUUZfSsn4EQsCk2OwnqecAloAQsnBMLaVgMvt2xkcxddTg37+MXrttCgVIyDMVRSXXsi+jYwlDIdyJ6NwHgJfQVAOQQABrB3BgAGxjwU8y8eIfSYzBHaMF1dTIyewy2nJHZPRpgvmCkg4Ij5eBFVfQFTSJgzmkepLsAeQb/3Wf5/u65QJ1Fw5yn4in/ioGcQpwziYHTEEohhYdkZ5/xj9K7eSsqaYUOkMWqr+PqincQe3cVSmsD3ZB5F1d7viiiIbFNYSdw2w35vDbjYWbbtnjWjtT8UGjunXT+Z7VsRqvVqZclbK6u/S1F0GbG5GS2TtWsnarovwrhd8rgroYxRqy9xH739uZMIAzg8n9ZH1kR1mFA6F3n1kuX37YFJjXA8kija2e68Bv9/WihVlUIRDDQ047GXwbeRPt8S04Id0x6XJvuHNeB6KRmFR68Tsp6pfk/LzL7nr14EMynmj754pmaUU07/ZuWP9ucDWEpZyYk7KwGPtO6sbau6Oi/xBP8lYsIdtrFNo/a9P1zYqlhIfDjMkyatPvwhxAmANZMUblqh1/N85fdKtm7TcpNS8zE6xVn3pyINymyA0sIEAmvpgMlrbeMEX59ZGcgmp4mIKOPOsZ8ZqDj8DRe/omeneCTaDGFyycjj64gBT9ubRUsL/BKn4K4wr46N/eoxQ0gLN8943hHZW5zGns7/+NayCPtgh0y9+tj/NLC5tSb4os+wtqnFuLoTS+wlBFC5xt9hsnUEz1Y+MsZl0J6QHd45//4KxomNHbeHAdCJEX+nff1LXXXXXXXXXXXXXXXXz8/z0OHeMe7m/rP6ErFAkSlEEh3S3/6bTIDt4DMzF4rdPL//OBk4kytkedNw5/EzCWFJdd86GEAAVBDxAQABgDincvBcj2KjFibVdbaX72Bs8xTCwSRlhCbtbQaaEiZ/fjUOxKADNyPF1AlAc3qNUlw6YDajbcQd/XX3AoGUbCXOiQpBf+WCDLcuUMSKvsmrVaDH9DjHzqCAAl4AgQAI1A9YCyQWNxQXi4R4vFivTAL5iw9hdvGkgtEkdHAH0rUqdutKuQ7AEEiVfbxbm3xD02tTkdYbphETleOr1PP5ZfK9fMwoxohIA4lK1atdEgtmN4Ybu8om4VVUbH9YRDPFt5hRktM2sOipsCyE87/a/QIYGhFHDrKsnp6B0hyl5Lrb53zVDuWdphF/TX+H1WZ4QhtaRzRV8HOLayXQHmXTT6dZ6YByySbYzdE8wisGobviJme+enKCQUcLKamm2C/shTiIKzEEonOdTubU5gbkZBFCeUOMEZOKWqbJnT4kgyWXKYXbRgue2AF9qzHxOeWYNks1efgyLwOPCrZM7ycRjvIVcmeQ5s5ZWTJ7IxratIK/eMchrTKr0tLpJYbYhPyzLTiLpCUJBvp+7ipm7g5OQj9WPdBCRHftZ3O7UrzEN5Ted6qSBW1YYoHdps6P2aewvvkJvOtQAUyfxG2Q5MffBZnXCxLBYogeepTsD2Fm1Yqnv3bfqaO3QNCw9C7UeEiMHC7KTvB65SXNtwcf///grLDH3XLGdf+EJmYVJMS7+TvfU9dddddddddddddddddL19V/0GhngFRTQ26HQhWh6wAu
LAM7HBOAtI16vLfwHQQ/ceBOBgKK+WTnPPyhMWwIAAiEJICAAKAphvgIgCtBmo359tb5sD28G0oCun6/+uqoqxqvkOEp5zQatRoBKGeaO2S75vQ+GiSnwtHSIoDVRy4GWNKgvKWgaIALkHEXjFpWlyG7/5F2fWk6iuLd//z4ANFodWRTsjw9/miBJ0A8s2FGkV/3ggnOAYIARnA0UtrNezKuMzBoIkugw1gijafgwlzgl6zAV4KXWCcKqWBqy+gMR/L9rVTSvVck2Odfj5+lK15lbqyDdtxzwsrymnZfkDyfgd5p3uEksLjMXZPSBVhcjfxVslsUQjwVQAw9Uq++30ozUfstreqsNZAF3+36DlRTAdXFxI+1+giaC7IX2XRajxUeRAKRzHZAK52F0yEMa4+7Q8CDC2d9BawRU+W3HjGPVxMMl3PWA2w9KbD1ufoIXUwRq3LekQLfnK1wo6WmXTiG21b5NZ2cukGLOoxg8tkzOan9UUQW8ff0qtSfvquqcBidukSuJPUiX1OkLn/s1+B2+LmIMQ9z+rFi15pblS1feL9Qc7E8vips6d0bUrjiCF1SssqZDKp1lkbkaVNFfoIJ1N8JIgx9YsbOVNzeMtN3DWYO7uAE2Z3Du6PPBM3Lc/7TAks8WTb99WAff+7KRGvKuOjfODE4GWh5iiqlHPOpu/vypkX/8NFbPx99R3TBuM8kzK2++98JD5QqSB8SH/Bq++p6666666666666666666649f/0//rj1/0GhmmfGQHpUH41/2IAmd2DmP4EaVf/8/wIsRCBl7dKG23TR0FCCyUe8f1RkHrjkMUmAZXcGC9S2aVWH++CBBPMCAICqGgN+YhtRsJ6YArPP8BIqCKHQXUM0cVgPVvP//H/jqE9MlBy7kCI5zACuig1QKZM/956QQU4xd54DwhHe7CX05JAn/GJ0kZ/5QZhAgLq2+BCLLBXFJcZfmAELZ4YLarv6/wcAigUGgqCR2zFTeyGS9g3SGaXvIkwLzz4Uzp5U8df8mzbQruUF6jYIICwIkWCEC4iKPlr1QXMCYPSHHkeCebtBDomU+M6Q39EABAV0BteBx83+OQZGA7aR85SQGPicflg6DC7BRqdipfQpQIV2cZCGeWF0d5dI//r6nxTHFTJRnjvxyIF4Gh1Ds6Aug46/DoIiYMviS/A8BAn4cniX5/iWoMMihqHctZMOd6i7v1dXvv0NKAz9xc81DgnkCfkHgS6V51fWbZdAmwCSS0eYmExQcKvBX2RQFwKwfvSORCF2/CtDBYEKtKA71WxhrQ1QrC35x2YO7uH/QaikOdHVkh0LsZtsI2u+P+BgY4H6Ybh0PeP+J//JE+uuuuuuuuuuuuuuuuuuuuuuuulrrrrrrrrrrrrrrrrwAAAedBmjgK+Cwvv7ixSrrmsOF/9yiLYZQCbwbL+X5PLkv++PLeWz+Kae+4vA2sXR/L+4sk4egq4szijA3/L77Vd/lKnJCFnuERAo3cJJzC5GQCi6Z+/G8Ltfkziu1zxM2de8ZHadL8mDp+x1uowqw8lOVZeAmM2YHfz4ekvy+xIMz9wnfeRKUyJRQUwFIvu9tMiZL9yFfcKF/vbCIoVhD8x9bwq5XaW4wmxfar2bG0nnFZiMcdoapmzOSGikHn8PBc2Up3CBTp4aRd+2wh4pHBdzG5r683KFxCEbdoWSAe6zwVFX3CRYMCvcJFfeVXdhN/312UZgYy+peUsroYEXd317Lebd3Oy+GVZ5CcVFdOWT/xRA7R6SeSy5PeVcl5LlKC9iX/Cm8v/uKgSete/wsjMjIq8J24dvl9eue7F3fdPC22c/vXN3fxF33v827v6mSupKn4s0CNVDE97qwEPu2X/QsSldF9X+96VMIZl999z8J8SZ7u7u/iyvq7931yF3d0nzS+CO74tuqoqfQRNw2ko4S8RpYPuC+FHpmIE1x/3b6RT4rfxOX97ovrvy630yVSAhJovrCfJhp72Vmvbu+vX3Vdf
l8vJUx3chPl+9Kib3yOGK733k9S12cv0n5O5+GpIjVKtVr+TBZAAAAC1EGaVAK+CvzDsclfyld9bw0vcWIAxwdBbMV3D0DvZydfi7zfD77eUmE/sMeOKO+8iAwjfiM/bm/8oQ2pw60EBvzzvS+metG4sh/yhHBjg9e9vlLtFD/yw95uZZBZe4RHbgTBLfxwrd3e26oS2SlxSeTfbl5T7vup8hfCp/RtaH+dyMe4+Fiere7jxTu4R88QO0ouCOGQxJZ8inOyiV+ji6PZ+jb7lL9PVhCQNPvH1Z+CT6lWraNu8Ju8f4zh3u99G/YsmMtreWOcn4u/zlhqHtMH/nki0XttUYjljIOWPwm7Uh3v1Ex2nk8N33pal9G4LDGBEsQI96N8pbxlwd/9PE8F0g/bUGzcsz4+e+Z7VxOfskvr3s37IcPjvP7NzEwjdHnLKv7SeEn4nzfUKh5hfnC8687qKtjcdLn1sZmHD7R9kFm/332jOUx8lNdP+Tw1tJYNvNCPgu8Z4fp3reUuEb3ve93tzLZb5PB6/lYVP/n7777IOLs+ZPXS8T4dRdK/PPsVWV3wiuXFmvd8OO5oMrHT+7d0O72/JMfd7P2LLz95PXJ1qfQ6WT73/apOx6Ez/7tBHyZBLz8iEXr3qT7/wkR7z+f8ViTveJfFG9ORCYYv88IwCfd5P1L09F0bUh+OjcVak9t77Lap97N6nFmgicz9l5p/2E3/Ttr/ZRMb1Nf+T1r8Tu99wkkgvKMl2FW5ebiywn48o/cVvaWvFfffOxN77yeGpk9JTVUg5QkTcET6nxwkefJPHNM951FnPCXj816XF30ZjP07CrOWRrywiRxvtItue+wyo2W3sXKd4kYPYl98T2Ejo63f3ovyfPb2bjnvPk8HpTRwqsCOgaJfwvH3qEOPbwoXyfwSy/3G8hXadVy7Ig7r9Gi+SEhpFn3u+9GV+bzm6T2kqEe9z/+XKMihdPsU6J9xNlsaCd6HrvSpuT8XSwgLu+93e8n2uSTDXkJu4LoAAALzQZpvSkAr4LdyjkpKaSy978MebwBK9Ux5L/9hEmWxlZzI31f/tbhzooR0/t9xkPv33z5SIs7GlbezBmvx5Yz1tqbHMvuX1Wf0Xp8ntxfOaIj5c7ufH/lreFX7YLBwfB7uwIV9x0/c1sGc9RYjXZ7RHWX2t8E3vwphhVi7RjmDkGkjtwkXgEx6zX/a85btTSTOGZxTk1R9rqi73v8p33Cb9sKCuXiR4rLGfJD5YPtuFuwg0OabJ9J0d2owjdOwyt4cVYa5z/+Zd+rVxJSw25Fzmo81pJ83je5PVd38v3tuytRgutKsXonu2e7WbpBnuZPSSPV/J6qltOCMkq5/9CXgny98/vvrfHEu3d+BNv/OjdPJ7f7bmLxmXSW4RNnDwy+cGHnTzoX3TROcnpJP7iy8F+LkjdX5x/TibyD8wBDkLXyk9t69TZkL3pwhuYMuiiSweyA4gPLHDafKknwlD737vH8KZe0txMIR6gN2uq9J/vX+vrFx8eHQdAj/m9tufrc9OqF7t+nNwwJEQldq3JEzsTlZBofp99ZS7uEexJHcvt7vXllvfq+n6/p+/F0+R3urDRfT+2uEJdAh8M3J7f4q97vqvSTv7iQru48Gh/of67SXe2l9/ihj3vve+S93CPirSe5nnuu/cYR23vd7bPu7it7rvaX1VlF5RZ6S/r6+yeq7XTdKUkwavbr65FhDyZWLrS0l102Y7y8v19UUi68Z3Wpt3ye2vWb0/eLEQRL5xvj4qvSZUpRKKen+Wa5++EvEinus//iSiu7blWe7rSyeklv10uT1rTfY1z/0N9Y4ic9bvkj6WhXv6LNd+2toUTd3d/UEN10iCRf38VFbu80u9cEhj5e59iROXZ1p2vpdJU/119LT6T6Syeq7qo65xwOJ9CVE+R7L9OrOFPEEjZ/8Jf1dLbKEyZL39VpaRiiXvvHeq+vr3rW9MlTihdfszu711+SEj7ve+vS4j
+CQvNacdSElxy+9yQx5CH3pf+IkK0aGCyAAAAP/QZqPSkAr4LPFjBuXT8178IdJ7tca/YZL/7hI3DN9Mrgm8N3y/l3YzZ7no+wpC4JX5zOw6i9jgy0k6db4wpceGcryZd3e7yfaTZZ9BDu7RThWG8fOF71XlJuMkjlhA97pvufX5PV1z/V5Ysjvu/8pXrCr9wQBIVu5b3H5w9NWRPsqI//+CndfuzAvdx9o/RmHZfaaXIXh02zbZbhMr8CT+fseNr3tvp3g51Mnttru7v3RPTSLotrv7qFPCJnw1cE4fuG+f7DGiT6SbO7bChB3RPOf26NDEhRK1IcS/s5Qyl9il7hQro6U/dCmJGLUet9sD/pq6J6ST5blJSknXuCgrw7JFx+gIdC7nuoS8FnTfl9zof2/bCN7cAj/+bppH99pvpxdzFfjMuX91sIEDvv4YdPiO88MohfgAY8v+3hErzFs6mPIHW7UH8soTUCBdLz/+hrEc/5L00fTnae9UX+CrHaeCQi9zoCP1Xrvl/wiQvK/705qAiekf/wVlSQ5nN+XK7/L+9+G1LzCcfphHwQiLx+lfZJ/He20m8ntpVlbh2I2pC192ZPPbM4a9OqhlbT/6FxhH3nNGOQicXckMgOoou88l5b0uKQQ4+9ArSX9uwnOvOs9JbW0mvrKWUL3XkiIaj9lQJL3AjW/d/baykBNLy8FcTs0WUKDEv93n/298pXd4R8EhOVh932Kve9+8wngj+df9NEGBF5l3mA1reEBbtFYmjJP+GXvZ7EQQZofdsS2/uu/dY9U0hNIQaYNvqxT6oPJ6SRf69tyr16RRp/wh4odmfEqW7fcE2TvHu75PdZK9CS3V5P198leJfevcSNymLpb7+37+gimMd/uTCZxgvfXr86etwj4SvL94zjuFCO7u97u7vivXQkpRL2+vr9e2/EfwhlC7qBKe+f9tiHT3v3MRx08/Re0l8nq665vL681jXnYhDyCoT/R+SMJM69zd95WM37bEEcok/ivTtkenrKWr0qsewka73Fb/lyD79O/37fJ0u4szgRfT/f+uBLiLOv9uxh3fu7SaFRw933vJ9ve5Jd3cJeJEFYjdzt4yY6jCu7vOHXxW4rd79C3k9ctpSarCAt3eP990XqvcxMv23Xq4nKV826FkEXvv79t+KEPLzBp7ZPTapDr8ve/7vCXk5e9/YJyPFbnTeOPaVS/+jlEnye/J7tnL19lbu/xiui+60mpIIt7otV2R2FCSv2R3L4Q8GI3dvSsKF/XxENPNj7dzlerMQRsc9/+0UXulT2XVvp3ERWPq+rP4hbbtdtuka9/qFn5GKEWj+G5c67ctK70mJ1rclCTp6u+9OSviInjfdl+Gc0RVrsly/v4iTPDH96+r6YtGv7KZEjPPBZAAAAC7EGaoBXwV+LHampDcZhiyK3BHZrpRRf98XtLzjZHBjzeCJtlr4wmXEqkn4Bj3V+vU1gf/itaIwBjuSm/3ur3Lvjlv/ceUZyi3u9bOlbniybudj7V3HHy5l3uvYmOJLvNvxlHXvvk4U8wrhRMq9sKE553pXAuyT+cEj35f4xNjkR1tMrOwU+/ceqmoP2hwaI7wplk90t1aIXjt6qewmXNMPxtq7/FaTyL+n/ov3Cet7u/s/J9/lyfl4b6WE39AsMJcUicdgzdz2O5WtkQLlS1+NI+AaFwr7ilRXxwKOTzqG+osXhkjeHr3tYQuc/00WeEC3t3uFDE7aMvh9ll3lJaopQ3f//SeJek3q000Vd4REXvDsk/b1l8vVIExb3w5tH0JeCS9jm+svBTFGWxW6ursnZVU6dD1+4wf4BhrtWz63yfreXhgmYfCOpVMD77HjtzTcIfBU4a/TYRLnJlK3lUDsGcC2rJ1vKkrodxl9+fbRkFu/3fv7xM2Hv52N9Jt3BPy8hKF5Z+tNZIRIH0XI/eZ02kH27aZWWCYppxwhrw4u833XuP2IS8FfL77ZMefb9xV73u96ZUIEx4H3631
hAUkiX+9mVnU/2E7zPbPFE8r3eRgT7xXyj33RLia/HoeNcKt3g1a29+/v23vpv0fsnttaTmmvcN20tNoqCGXmXsZhTbhXj73fb/CPgqp35PJ+2vLLu/tfYoxv8fjvMvTWeJFvYIePyfyCCBobbP8ULve92T26/J23r4p6vEkRItt0STe4SfagkM+7XWQt3/Y3d1R+9+r9t7erEFSMIfe6V/bhEv8u4gQ97v7Fumu/QnKT2/ya2poh3rCXlEu4r7F8v199jXVjdE9ctdaJ6164s0085o6q4V8E17u7u7nd1k3rWt6wl5Ch9yfvtIEhnu9b7+i4jSRehVcu4FWQsL/SZTt3fYn13vS1Je9+sLLfCdxO3tote7sk3EOFzTfSW/J6JR06+I6JR3DY7/ZS9eBHgAAAOXQZrAFfBX5R27OX/z6y4aL/7hEksh9iszhD3+YI/jSqD/OleGVPfRY2EY5s/T/46XOcq4b9wFhlJ+uEHhZD+mjtxHRFH13pPc199OCi7puQD3hqlGr3CZ3vdrl/3r9ky5Cz9xorjKiRwfZP5tV66m45sdMrksQ5VHzEJO+L9Mn/jfQWeIRojWofcNXlWLgbUHhsSgzZ5+v5GjPTut07vOwoW7I06l6RINp+HsCc/Hf++Vl6mh/Om2vHb4WbCvL/OvSW5ZV/11r7UqcTu+7vX0Eis97u97TYQu93MPuv533Cj9sKDOXhcVstiuR/buQaYyfdZ3lggpi2WULiBn/vw8kdTSMppd/uExWmCB8O6mh9xdN2riyhPxre4Mfx5fhHfozL32zEpgik4vu8cPtrUvsTN3OJbUWZ4LqWpY5jH3g+En717ZuH4ubCe2Ou7u9lCO73qqPE+W5I/4UoJP/3S4UeRuhwyhJgT1flmn/k/b8aVIElnBO+sP2pvk/b3ehPmi2FmU8t+j+vyeq6te8jBFTpZil+yfek+pcqIyfL90fwj4Jixm5svvv6ip/e+90VTZPS52lwhQq14/UxpV6Y+u2dbeKNCVV1Zf1cQCDb8zovJ7afkuEMN14Ses4NCBq67Mrbuq8npb5K5P7I9xui+u76WqBEdzLeW+sMZxa8wbcgF+F4bun/TQJuCM+dSMy82tPd7iLsB3W1XNVPCPhTjtO9/Cjc8vevon21brlhrKzfQRcnvqt0CEnpZfrvVF9dWLe0ipsnZFvTe8uoR8ftyck1oH/Rqz/LBTe5nvu979CS9JBIXL999OjPqqWrPqj+/yd7rrkeEezXn43S/oWS+77qxPvu36+vvHiX3d931TRt36Izb3CPRNv63xxMZRLdufwd/opXu8n6TufZd7rUTve95PexbXeSjrdVhMl5eH2awX1r3UkJeCYjy/e75Pr5SPZ3f2UpCk/3JTvox5M9r0f1+169lgkETMp2W2viZUW7TvCXipWLhl6arard94ILhVV74fHJhpxJZI7Xlh7wzH8nt69Q6JPz/dGrEo9bUhry9/Tm7vzas5Npddle2i09JrIMJdzj533YeBuHpbv7+qhPxHG0H1ldWi1p7NKvPOd/pbOyiR/u0tlkLuSvE9NSl9fXqzayfb/aJ80LLSxwh3FbvFbdK/NL3fk9KhJE2mtEu/bSyG7veMbW/4aWfieN6Xnz5t/D3xcAAAA75Bmu9KQCvgr8FA7UmakoZPyzYbM3+bw8/Ly3jb7dgY828OD0v/tgrJy8hammO+M8KwPq1f0ZRa+hcOI9ofKTcwLQtn9x5XsINM3bmsgWziVrTRZ4Txn3PeU3VbmIEfHrFkqD1cn6TTrbPN3r3GSb+XLco6/EXJf9k4WL/e4IBwXV7sES8mUZuOjFrL6gWuy9G1/zDkcNEbK+63cZuIj8KndgpHsf8hdG1WLOElUxqPcv2/hE7Gk2HVbbaGQp1UyLfh+KvtS1/jZuYXI1tPWNC3ymei7aOYsSZg2o0k+vyenTkfmnHL9taT8JcOJP9jfu9yXu8n91Tta9vXOpS47N2E/Cg7mldjCdx+fckey
R7VwbmT3srV3GEeCSLqPULpTrHYz7TKv+T6STOXcFZc+P49/ah+HluWMlLtsWUd3r4j3e+urcEmNiQ210dBEz74L89FS3cV5P1f827KEvFcuO79/ir73ft9dK+JIgv/f2YXytNDxLuHUtU9nLEqkA+/Li2HAEnbbNH/Z7tXx3Dpg4hO+O+8BqvGft/Q2O5geYltnKGL367bZ0oJ7u0lu/d2ConWVbTL4n/sGC7bRn9k9V6fdzxfCb1ceW6d73v65PSoXfdkBD4+sYxtav6xkbEr0vNnHWvxuIKB3CV74K5j/TcnNOS0tD/diy/fscDj9Xgj47P4Oh/8p5Y3pTrLTd+6Maejv3lgpvQihpj3YEJ6n+3+yzd8I+rb9xV3e513+5irmX1mFXvdip4k73hmnOZ9+R1fgiIwHB1Pl2Oz6rpe1r7JQkur9tlS+lCPk031ltrbq0JIn1k9pOj+0S+T6ci/6Ev3Vzau9ULXe95ck9tt8tRRXvu+kj6FZfe9wj5iF7fXTYKbu/Td3Izs+ylTvV13kLu+qp/1vtITViHvtckpeX39BLe93L/EQh5Kp/SHUO/Rsfl7eeEv9LoW9p1ZRuf5PbT6E/rqhsgrkjpLp+lC5LlvDcCU1VHfrx//6oswWDbj+6sqhLwSGyxu3XYKzuh0HDT3QqGW9hrElyLyT/Edtaby5eT1XX+vpevr60UqdEgkM4aT4nK2W8jhLwRSsSRot35YJ7xWG3nMX4+nVZHgmPfJFCrHe8l0JXaiu5FyKd34rVL451yAny7d9ypk/WVIqJIQfK3eLQqvxBA6ugM273f5RO76E/T6yd3dtCL3vfJ706KckgKqek1e7vHb1hXwRYa03Ftl8n8hty5S3i5JZLlshfk+tJf6dHIm08q8n16VnFusM5IgxrNTNRcnw98ZAAAD+UGbABXwWeLGLvWz+Xi1Xi65tPmOGfBYbD11734CPc6KDfGVLBL9YM8i/eFmFl/bfBZfJu0R9z16KWtWxiI1PLcEkwOkQyprLwUS95DpbwUtvXEleXmb/i/HUa3hT8faxxo1sfrZruFfMTgm2/Iv3BAQUYVQoQqyNpDy3Bt0lPnWYbgmzxYT72NtgZXfnZ/ssPrIfvW/430sbQIMb+zbmJB+Xns5xwkiE39/DyBUycCXc5QaHi0g3/RNuqh8VHFb6+//3Ch+pUJfakAiZ69gha03oz1LL8IYS7Uf/9XGQdSW0dfwGRMvu/jcQbYAE4xqZbhPcFfYyjnNK89Qv5gjtEMXJeDPCG77p//BThnvJy4a3zGrtTQ0LPVtraNhHlB0WPgJdXrv/n52Wt/cZeke2PDeaIlybvrfBPZ327nt74/OUf5/u7HpodpPd7z4ES+q3Ex8vJBwm/bBYMEj+xKku0XH433Ybt7yumjssaQ1t3hZt+R8zYz72tHiAFamnabVOQNNDQyHS8vqPb/xhbKHn4twyoKfHuoEp4dcqs28Kt6PJ759t4wq1owgtsnaVFhPaD9tNA18X9OJKk423/k9NsrF09+HyWz6vr83Gznpo8vJ+lr0a79plnjDaJJj274750HddPn8HJ+3XqE93c8Hwm/xk8Pe7y/uk9r7JTlO6vFsxLZQt1aWYpXwQmzcc8n377+KWvfsbrJ+hVCkmFH3I7hZNz9Wp4JL3tCXgh3n3qr03tiaGGPeRf2+2991idW90WbefbT1BHj0B/Qj5j05f8FE2vEkuk+9Swle/bXv36aySw6U3bQd7chASaW95lxn79J994qvY1dqEj3u98n7WTuCUk0+HIdcdOPuvJ6pE7VQj5iDdz9+iglK7bt7p/q67fvqxdFc7+jPpIJXP583eqI9336u968vH6YQfzlp71lsgJiO7u7u+u0ynNd+ie/yTFvdZPa9/RZb3hTy5f1+OIx3d3d077p7ElNV7vEOdp+WExp847j0kqy2MfLTZbqvb4pD8v93d+qs/oneXhMz5ZAD+Sfscq61GHe5fFdzs9y+2/CXiRhff
PCnJ9vXC9guO8vd7vhk9/JapFMnRS0T17JrR062r3p6vq3Kg32chba/xe93f6ihG7vCiEi5fdZN73CXip4TtH6Vs560UsZdfezPe4rdwh5aK7LvoyKVHt9d9iS7fTKXbdV9Sk3fxEudvr8lEMm+rHXwR7Pho38H/7v9/UJl8lPx+PmHgfjfnr73UcZxnBbu73v5ZTvt6KuifN16qcWT1sT8ul5JJQm/yOFl+EyO4rGZRj9/IhN993v/WpPuQ97yenXm1pIqondwzqIl1xnLRagvgAAABClBmyAV8FfmHTYcuwjfubNfX+8vMR9/iyvmzmkGPN3HPy/+4ICcabZRlMzQCb+L1rvdwce9/3Gx/J8IF93bx96LnyncpyNtKiJ9bIO305x4/04mlGDPi+eVNu5oc3hS9tOeaHH24I9KCY9bQyhvfxePL16iSYZe+TYWL/e2ERAo18jpnZXjZwhPu7v7T7IdbWr69xn8D9f5NHp5B2XmgXQqXBfb/IZT5iz0c4Pd+4wTto8XVB/XimtTrX4Ysu8CR9LYF7yy6kAg2cv6+JJyLUz/MB38Zx3LKGTBI4N5f8uxpSCIWhy6Z0f5cBJoTur7/o3WqUsI7laD3slgN2dXMe61+j3TR+CyP+vdbr+M3dnvx2z+P+tXvPj9whve8EtyP9v8vHZjCfgnGOc3D6XRLJJZbm67Tr3GkdqKK0CX9g3R0ySSXXpCWc+/GsBX+uT0dhf0olkhIt8v272MLjx1bGVlXaveFnhC3I+PBTQcz4RK3gj8Nz3fexAt+/yzEbB+kv9YJON56O9l6fES3f+MM/3xsWG5R4SjaAXuV4n+EQbVq3oEF77Z5P7zLkVa4z1+EvBZd/l5EJHvfWZTf4LPGXR8dV9Pr2whL8O3D34+D8Z8v034s3NkNoKWRmhLpP6tujoOHnNht2qeJaXiL/k9Jorc+i4FGxvr23qiee6Sdx+Re4bZ1/K15fJXfv6LBF58fJ6pVdYq5VGcLzG5p+kmlZLiovmGklbhHwRiC/iid37ir3vfsS8nttvW4MMBN67/YxbrN5rUO3C/8nttpl1hEmvfPhF5r/KCxOO8FfISDSg0cNODY+8ITvG/RdtRp3aEdrHd1k9tLyfTlivZfagnLDbDD77xbbpTE4dfWm7T24TufJP3f7R+hHwQk5/bf4vur338hRqpe2+8EYokIQvlPQQduCKGvQPe84F17dbKY8H7OwTq31jLoji7drxTonu/9L+En+CrWbvu99vSq3VvGKt8nv/69K1fhJ/iu7z4/2+jogt71u7v9P06XTXY1CfPz/9HRjXvJ6SX0mK7bmX/WPK7u7ve8vl8i/hDx+94wk9Vb/dOkSXp8v+YuOve9+S/fdZYTHnj30tfVhMc8/3vL9Ot9F9ZeSJ19L1gqJed77jQeX38ZqjNQln+QqUg98lwksSE8FetVG6eZ/FlvMOJe979Oq9rv2N9fmX39dKL3u8bDjd64k+mXp27hLwvGTH5e+b/npGuv7FGTvcsnLfxJ61IvjP5hhU1y+nqu2ilwmXdyqNeM9eXyeiexfv6b0tGjDHxjp7RwPbfy+7ny7v04UX4TJe71XdNnhES7+7N99yd/S6fIu/rN5cpaESXe/WF8soh7vJ9VfuYXkh15Pq8rt+lN5ck9f/Jul+ynbeGclkF1cZaZPXU8REXRwf4lxEhQoxC/ov7ncJPBXAAAAQKQZtAFfBX5h04CWfhu0NNGV/U9rzE3Iq+Ut3hjzdQR7xL/7jScuaRTtxHrTnUXi7svotP496X/tsFfLBmpKGyLsjOOwQ77I2X3dNwndLOg3k92xLd8VLDhgg2DtLavoFlF3L3lAw5r8qZPfP/ZyS55VWJgn7Tz5cU2T9fJkFky5nz+U6UVuFX7gsCQXV7t5f3c168sKe8I7fZZRRhm3LQWvEhG66To1ORcW/aGCdi+wNfDjzrwygjG0jqH3WVNiSYenYYBDZcyX56ev3ZR84K7H99a5si/33rm
5IQp4w3HaOEbhQfyn6Z9PNyEenbRsFGcfpJ7La1ezO+a2ysKEesETowtHokogg0k+1Ny/JZtqkehTId0wcf8esaK9sfhAue1jbqIrRZRZP7pM3IXOVqsrChTIGlJEfLBK8JnGRFpcN7a9gtk+qr2wh8cTSgvVFeEYNv9Ym+MZP0XbJOo9fQve/P5PpeQpfopRd39DMP102Cwwdz/QJNrBSjYiWbctYfNfOkW+/xd4TrBCtYRWXmcJvpQVa5+0/hPw/JrUve4Of60mh3CLzDe953psS3FiOHsPQQ6nX3NXqzabdwifjCKYf1KRv/d7Gy4qZym/F4B/2O2fh9N39tEzKzrPurrH5XPjk7/PnXl//on1W/Q2H69SCv57SvrvAzYCb1HHv//ODyta/LG9P9V/5hOfwj48RlWcv88a0X/PvJ9r5VslIID0NKZop4pAn7zUOzZ3K9/whM+tEpBLWokXQ2AyO4x8pguOA79bexb301l6a9Cu7ox6O9XWTD6Xl37CZrd/DmnqlKhPkUJHK/vrR6hHoExNN33/VX2JLp6FEE8YJetg9HER/BPRy82Br46596r01fpp/VVs93eT0lL3SFEobW97vsqX+/wRXfaEV8UYzp98rSL/v6J6kMV93Z6sd691WT00sq3Je/ZYTu/u+8EN7+2pevflnX4QL88Tzm233yOJpjeHf47hj3Xk93Fy8xe2+iNle/WTu+n8z7LRO3iHX1CXYmdLPDJKf3v1RLCZM+yyDnf4xdZd76X12mXSKdOjQTmd7x+x8OxMtDj5hH6oJnz7Y2y/CXoV2/bCZXeWWKyFLqilZXlX35NPyWUbSP1RZ4jcv3f1W08t589e00q7Iwia7ve7uBF7J37+J5PUZ/CXRJfvL+Z5UCgmXtu7+9fXL+q/7CL66cVlju/q/Efkonr76glFcs9Ul5edIU8RhVub5JZfsZV2Ry491q2US9exurL005I/NteRs13+LKTf5aX5ChCifd/uOtHwgR9y5duZ2F8kLajDNEb23RO+7vpfCXL5dXtyyasbZy/0+EOT36+g6V3v5cks5LY+pLb614Y8QaSHPm/Z0tJfD3xkAAAAPbQZtgFfBX4sdtj2nEQ2vLSaJnL+TubeYteeWXLKUeDJf/bBYRSjzzG48JruRo9PraaPZk9wVzda/yHJx+5h0JeDFfO2lbcEm8guUW/c0fB/14n3XuCrjdPcczDMsn+LLTn7PGcT3voifWlwr5icEecui/bBAQLgKpxoRg4R/x5UJvslK2W47qZ7JeziLpK/k8ifsbovPz1jn3y+2/QOPNEt9OZsezXizBzsFha3mAaYEb5w1xrScztzvD50ozp4t5RmULQQfZjVlkki6L/k9u2jsrD92YIlQvxcSwl5f97l1sk4Px8SChlRvjv/1TdDOU+Yr5orJ1pNdKYHg+7X6osEXhthUoOlyeq1VuCfem95U9ode7u73uAl/axWvl4GeNGE37YLBgo+33lt3s5b2+mg8S1564qzgSPEXd91BuJXtXGB/9jef/RYQK498GZTmEhtW44Xy550uRIuWrIhby+/fpOxfdkierVEp+4KCcGsJEOO37LtQvcFfdvgEv63/V+sgFxQl4cvDbi0r/J3vrBPe3d5/t7e18MG59zvAM1sXnXjnf8aV4fZzuUn5lmM+bhpTEmxL45c+/ovWBfvd9G1R+T6aLKt/x860COts1i5StblHG+62gt8iCOiUCPdL1PaoUfRP5P1/4T36cdgjeU6bGd8/wn4e/3sawVZy19XZuGEJbuvtguIf0mfJ9anvhA3U4bHRet3zrbItOI9Pps99tVSE9+TGBP91lQoljvcpKF8IeVcJM626O6bDmP0/a1wk9fpLu9fbvCLDsPtoazyTqXhvk9ff2U+ffk+6FdS++rFoVMy+d3vT+Zd+kJhDyGzS8sEZXu9Vpd4Kiu/OWffd6tey9Nvgh3f/aSSl3d7W39XGmj93iSAj3Ty25JWuoR8JZ96af4
zbu7y+73bj3urPKW795L395dTd3a6x13d3fve397u4SfkiIIvp+eQW/fxXfRUJLSdxL9+/J+vLv0dkI7+qJ3fWXPl6VRHS7VWNQq73AdncFKc+v1hHOm+jYW4O3297fyFp3CXhMVz/Dr3flmKfL6TKhrRzi1REnpLS3iNk7uqP3pvaWiTE1elJMxF93n8JeK3gl/2rV8v3eeCQghw+iu601uY+f5Pab/8n9flZimI33kvVjdvW/RB5Hw2hbA0hR/eRcu/ezj+Xwn0YkudZS4LjQCR+W+JtLurPlumnqil7J21mRBJ/dlyWW962T3ppiiByI/887wuXydJIKkpJJ4o6U+b5v1F93xa0+vfrc3WuKPu+TPJJvO77LD7k4ZL5F+IMaBJcejBXcvki/vGS/7LGn5wWQAAADskGbj0pAK+Cwv/nlGDUpdp3fuiJV8XybmH7hrw/41GH004zxALdzH76iG2fq+w0LjPDfMahz+H7/PXuN4zct3RhlnXdO05i5Sq4I2pERFwl1oZ/rLx22B8q6PmqZ9V2UFC702W4Qjld7mq6WBdZDMDBvTuqLLBNkjlKcCOrNsW1XiC4yxfs49fb7y8SRw4P20nws/cFgoVu+9ytsmEHKnpZff3CHvsJHprSQUIwNe+XcnpUd+7Pby7F92TC3XVOyoLKnfm7SE0uve/CnggJIFj9AklpjuT/UBG/8FbmqoXPLap72ZVvrbKxpmMPzjVr3YG/0kN9Dcnx8C7fr0gE7q0P7A9uxKfO/L7P408b/z9V54vMnOvo34w8OzutnPTSUf/6Z7rzfmvmECK5I+fo9OUUP/wXTw/y1lBtMWzxxXCL3Y7c42SsbIWsDd73mtzZiVe4m7lTyFN7aPLNs41Mc0sFdJfe028FWHp8vlp8OT6CoWlXpAgMZ0PpE7qIJeORq9aT3lfnKsf/J9db4LON3t3E65zbQopSLAvo9bp+EJv6BOZ/aSe1KmtLe5R/7YRN935suOL1dnRiw3BQvpN8X4ev842UX93ppzroTu3EejNeustYu/ln2VTCXid7z9+/lJe/Qn07wJvyzKW6+nN3Ds7dJbiScbcWlqbBPDF+HdaTd+t91VtsTu+8xHft9xORd2BMpevv+EfBRn2ll3VXoTXakNx1bS8FPIOp2+vLT2MGHTtu7twRRx3aIf2T7yd/vcc39LcvoT7cmET63vd7dKaEn5Wa92Puis7S6cEVxouX79clll7vv2mfghjTX/dFIvd4Kcvu773eoRfrvtMfvc8Pvf8UV3ve+l7NrJ8nv7+62ndeSE7u+9+sJeIn6kQcuU/gmLn8S+K3T9XVlfdOWvpvsJW6Xcb/v6EoVve5/2ktAin73Omu3BIZIBBq69b7m3LxO7vjZQr1+MPLz/d3e7um5fYS6Eip/y/v8xW3L+10x55/d37v0J6TLI90aWW9+/qxBbA59uct7EPa0vdlvftTCCwhtJC5PrdikldxlAQfsJeSmHFbe79QWEd8u/cV7/iTuieFEUmYXPQ1TrCe95v13Qsr3acdKD99Ncu1whNV769tkLqVOn+UYR3vbpu73c/f6wn4jL/j531+CY127rpd2WUS8KPutV7/JyftvnkqYWk/ZTNf2iG3nutySEe/Swut8EZn3qTwmJz55pckTN/NHDPkl5a/JJWtZIgue5bPeCyAAAAPTQZugFfBX4JB2PvzGKfi8gvVWfJ9JCe+XMCyS699ZOLjfPztB1+sJBjzZJOpZV7hIlSSXEsCD4ZuBvndApfy7whvcqAxEfL1E7vZP8Vztd5XZf980DpYJN3+EL78qgNQV+vQUCl/9Qqe9+SjH1pvyx8mmlhmtza6PORpfEkNv0b/KccmdK1puBV/YRCAHcNTNcse/4yt/tNC71xL0N8E9c5i9lRiNrX/91ZGzVLPLUc1OpLf7g5oYXGp7UbPEL79xx+7D7zn+4F+BPTz9idJO4TJgBjal/m+TfT+/ToqVVq77G9f9FkvvJ+v+XLkdU
Jl/vcImFY+oZdFgS6PGZ78Uh4VLcv93GGt+dLdaCN7KHb0P2s9soP6JQW3hruFDqr/9LsIlSlLcjryAwXnRO6o269ExE0t4ZduW0yysTHdP73j7Unv5PlLlUkDV+2I8MpJD/e6p/zZE7sn9aW0Cw0+GDXwW4cs5a8Pf5P16WwUXcI1grGROcMzqhLwWUybfl4zVv177+wpffdhtJh7z794bSunhvqxkh3TbzG9u+Q1dIlBMH76rPCJkLklw7ltI6E37TYeKe9a2uHn7nFRL7Hv3XsKWXH9Hr/aeNYLIJ9v2v9Mo/wBDX9m3m0v8u8UWG/FGMQ+7+8nq2/cgkl3uf9uN1tZJKfqFLoGahlJF6vlYMrS+0q34+EEdeGFjJ6q31Qd+HxF0GVt77kH5vX9XzzsRqsbpvJT8ws/tQmssb0nnKICVqGVdG7H/Z5inL5QZvFmpw/c8ovyx0SUXng+2u9m3Xei93kwzH9Oa6T9ZTUw90u1T4R8SXY3u/5sOPfRC91vFaBNw0YH94ZRZiu3694y32/RPf10qJ2W9wkX98sxLv00jsp3pkuX/EJxuq+qL60S2lzUWXfqsQgzDZnd9R/v/p1120S73CT/F03b3fat+qbcJHe+xv9Iz9jZjvfv7Ut399Xl7vbVYIyXvXkYu97nXwmsvHXPAV3d8E/04v8vq6bSEksvpbra9SEu/q7vdeJ3one6J9JNbuCcmka4w27c/LQEu0w176qEfJe/4Lsi+XVFet+oru739Snd0+ivqux7Ld3pe7LuSMv7pqJobv0nk/X5PL52S1gk3eUH4JBGPFAWyp5ImWM7M6e4TL+bKdiyZtu7v1KcPIchmFB6dLuXe9LeLKWO+WPVU/9qde/c297yfL+lkhIit16GUNe7hVfglNYagq3vu9b7cFAlp/L5gb9PsparNveX9XyFe+ki5FHbetdEwst8Fwq4rdEeHFutoTvcubvr6+v1YmqPVZJc0fyQI8AAABBVBm89KQCvgr8WO2hpoljLN+XNi78XxnPfaDPgoJcEecliDcWMqrlBEv364UpvdsJwWWer+7QfSaHDeywndLyuXeGOWEA2p/48uwI0eHcj/pRPBJKgzBdKEnr/dFPe/wjvISSdLLgIm15v69qFfMbcEZWxivcb4w/zwOqUkgCRfT4C45Ft8WAn3u4I1YNt3rx2Cb3n0Em3xhei4Jr6gz+zLEqOc4l2xpMZIX5DbPMn733qmUNKwY2nPwJptinQzB3ZAlH/Q7SxFdRma13d+1ufO1pr6uHiz/uNEw/D2yB8vNkC/blHCPdj9iVtcDb7rbQftZ4TumZ3F2BXEHM145M6KE/pNNxzhvEP402xcw+a7dP7YT+vhuF7T1soMX/HA+Od0P3P7NP+jx3dx0KHvhpJWwcbDd9p5Y4sP72n43V7p94d27z5cM44aDcDrtScWpZa/pySQYUHoy44RejWjv1L+qyoZvdxRvzRb9736QQl0djPP5AQu5kO99q+CPhpczlSEy/3tihgXau7CLzcvbjr5vdxhL2u+9K3juSOqt9VDJtX5fQxdYy5dy/T3gqLDfhtP34yVv+eQdX0ee/t94Ivcl92WUss5M3+SSZf03v39+T0s/NsWaXGLIdoiDtfQK954P1FDRe7QcneeEvBJd3+31QJ725bP/h726Ecsq4SP/RhGPsTLhSSf19jWJLnnKHz5sC/5adLfvS12Je/k7tkyEh4Ycv29OJI4u05Yml96uCOHhOzbl2bB+xvDjpBHwsKuO0rb/v8kJY1p6aPHseTlXI2xp4/76Whs3hHxPNLikH5QUQaGXCiZ83C9uQGH3eDLykfX9flOV7uK+z+hfrBHWP917Lt7bBbkH37n/2mypw+bw33xXMQZF3eqw05X8v+VKY7eoS8Tc/929fX+id+ScqsFs+Sn7L7rv9TFcvuEehRs9EhD9zxvVwmUrH0r9aLs6/y73v1BHd7vT1fsvSa/WYt77fbW4Tve9/ahF/mNd/bH5
civN979aLqt1aSi+6N3fmhMr33vaZ15ParaJ9p/CPhOxub0nvvbUExnuXH7vk/opMkbHCXvpveWHZ+T9ZMRdlJ+l3lEbvovSZYiYr37spH3TXsEhIEupFf/u3Xtv0Y7HI+EvQrt+2i/ye3+/3mE7uktkT3fvVL37o0X7I95taelBIanBfi6Gdvfd6AnCXgiKSIl8If6JfbChLd2bVt7n7jvu2NbtFv9md/eExKbvz/vVP35PbvNv1f9yfKUm716ICKUuyQQccTXkF0nCj27BKaAkdPf/j/xu/qXddF9fXk9bySfa9IWnNOUhvckK6QjPm7d+SFTWTks7kyd7HaX7/8X3fd9Esu70l/teZsrel9+Mxoa8QStHNS9KTvjLJ182CyAIRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd4GIUtLS0tLS0tLS0tLS0tLS0vAAABE5Bm+AV8FfmHS2a1DL5Y9lw3+Ls6kJDTR+T+/LLL3KU+Xc7n2GPNnHY/cl/9wiQ+48XxAuU1YAm9T+f0Plng+vwxGI4tqA3hP77qZjfsMLYXKDA/2n9wYTXyW+TSpRbX9nhrJuVO4zd+k/C9OaWYkVRAKL1jWvye6YmXaQk9Mr+reqrGXpywonJb52LHQeu9vKR7OFn7YRIFwBUtlj0pYOQm/U+GZf/cbKaDBEZk1OLzDo+yWyCF6Ul/bKI+UfJD3fRfWwJZfv3ElKw+yohx72UZ1+FCx4cg7IGzeP7lwSf1mMQetwQokVUrELHT7fiZEsHbL+/hAhE3AMjvVbK/3VmJhZP54ZS9ZP6/LCdJvhhEp/3j5O/wWlMChilgXd9Pk/Wrasf5TTRrw4X5ff4Quxn/b33Wvxd7mDWURf4I9skrF1Cb9wiMEvd3mZTU6E6xfy3fxpEKzC/QxnaWVIHfEjXaKghA96XAieAr7nWJwkp972NK7UeqyeRa5EaxvaZUON162t1Fgl5oMPbsJGJxnOvjnPlporGFRd3BVHiv8/hKsCYc9Z6fxdwaG1J8H7mS9pPvn6te5Z1H+bw7vvZ0CM11R21jt74KOAk967vbjdnaEvW+/cFl8Ivvw5o6LfgyvXBdLJn1zjLR3dYTNk13+ErUGPLwfBN58I/puRvi6xheZc9MdLTKT7a2Off16rcE9rfcpHKsvW6kLaDjlPRUCUixo8sWQeFMa9yFxSRKgr9kXcfaFin1JcLs3e8EJeCHI+7tcuS43j6X8Kk4Oy5+H9YfUr/2NYKy17vuGL7W3kXj+SCszc3G12h92hLD1z2C+qrFn5dd763IF3lUaoEm766cFJXe7njtB5Jh77VEeMgRP7ef5V4dQYjWW7veHfVrhAR4P14dSIc00CLufvrhHwQltz91aaPVUeCURhLp76nv11QJaV610I5ed/dfWTlX6+shTlzxvs/at4LdQLQ6feWkHYkjr8/y/7XCT9oMy9vV1r6sf6rr7L8jZcve9Jfct3eiem0pfmJd/lWtuVa9CWWCjd33fD3Rcu01sd4JNt9aSt6sapLzbvWTl//YsRZ4R0woMckXve7u7xXdvSKJe/dCpyTcHb8RDXFLsb7yk3ff31dutPJ7b6djIJCVvn01mr37O94SWcfizS/e+6ksF282n815mT1TLt3KJalX7XJ7d1jpFuxy/GeixRY3
Ody/19/cilBtOsFgjd+WjvYYmmYWT3/rE9xlToL4TL5lq4y98rD0j9vd3e0kiotycN5uu3Hm8n6yWfiz3fD0GJ+xsvnx1r39/ZJe71TYiII+7vZ17CnQjL/njrLwSksfnHbnL3ra2kQTu+0XuX2+/v6NyencUpiohLvhcvkv5Dbv1BHxW6Vcy0lqU93fSXZCAmlvS3NpbeKlJ9QxkiMRzNspHL6m+I0i4W2dG1L7if6zcQc8x315NvPkTctdXWZRcFcAAAExEGaD0pAK+CvxY7ITWpQVD+LzZmsm682azmOGi/+WESTglJ7JAOQuBbeil/e9VDfcIQu/Sbdqdvz2UksddAsM713CmXDBbeidgm9I+c9Ew324+99zwsBD9S8X+oL1J8n6+JbhDl69syBoqdze+zz515WHOWy5BrLf/MRy/Cvm8BZ7ZOl/9wiS8MjaJehbwPTF7caM+C7asvG0wh8fdXwdkAs98SBN66ttouEwukDOlo8mGS7fsa2rly9fxqPqugqMLWpM0sO5q/8v2/Y08jb/RFX8LsXm0PbgWht2HX0rYTDCO7BQXzwvKekvohFqbRyrfL721jCBBxzrjI/rrE3vnye8wpusaXdqsgaXjRmBGls8ZdvaGMCbU93xw+syjzXsLrGHcnrddngpux+9NWe/eUVLat8F2zE+SWRvUIbhqHL94bdEP8X9oaUZmritDbLsVlbMKO9G+XHvf3+M2+e354QksnoZ87+pbf5eH+KDCb9sE44Lgqezx7+9uPokt+4wl2nGIeLiU/BNi2pdvZYSVuQ5Q7TUPem4ws3EOw1sFb6K2H1/s+sC8Jajxtk+0t6TwZsaCHxvb+EClb+GJfXoV8m+cfqrFssgCLDQO7pyXv3Xf79CYnu7v0kmoKDYYuib7yD+GT9X/Cnd72uxhF/nbG8JeG/Lnfk559/ir73cbi33BVdaIjrDb/nez6D5KdLQ/juHcifon5nhMeN++6twxKF9NgXBZEgNb66RJJ4fouT1S93CHmq/IPjYYXnXA70N9eqL6vMe96szsKEqx/R0aEZ69I3O33C1ODJ6S6TiIJtTiFvXDokhe/5Rd7hPlIQYI/6Fw0KMaDmNa+dkN9z/J9dW+CsuBf5JkH07HEU1pPJGGmBYSedWmRdH/nN28ONdRYbET/2SUuFr7+K9lgh2V9evqCM8gXb6/BLc7PK/1u86CIh7uNea69SDl+38p92wiX9U3HTMv7aatqn8EV19dlaLXTe0qcWIgjfU0m3by3/MgXFD0XF/dx19Vp9xNKwuAtXf/N3en93di/FXd976Gkckf6EXfdijYmk+8Ed73N7H8IL4hiHr77o/Q1hqT7FXffeX970R/LILll96f0C4Re93fEyfVf44t3d93u9rd9lVeJvFcV3hHbGEP3cVvdtz49237de6l9oEV9/3Q319nq+k60U6Zf99E13govfP/+mloE/d3e/a9YR1BNe4+f1dzb+0xJJX1WNKbxgqGlTVES9fjvf9E7pUmJuCQjmBN8vJCBXcbNDcCHdK+zt/3bv9G23hJ9zmI99/guu+0Glj/T5PS+mmhwufKFyzeMof7abG/wlKhNm8vf73P/qI7ozS5zd95L6K76Nd3eT97tqZlw0P3a3f4fFPve/CDgcwvD1F9apO8eB6wl0TP5fyII5q777vJ775JIJuMrj2whA+8kGNThfrv/emEIk73cw7fJ7tFl5PZ+q2he77vrchDb35OtSRBCw8/8JnDhfThPxUbxsQZp8v9kkglJa2oV2hQfB25dtSlcpzVvruTdtL3eT22v/tLtNFPa+gjj4fe7y47/7m22PhZZOExDo3cZlb8+xcuOKxWnc9ORdqCM93nToqBdffdIdu1Cd793vESerZw4OX8CPAAAEmUGaIBXwV+UdxuI+GJb1rZr+ft3v1uLlx+4367/LzwZhgv/2N5ojE9GGV9TxWSPXkIEra6YGIWi+vbv/+HoMVfdN7jP/uNgVf3jvXXCL7
Pg1NOBgZqpXvv/+k1H98buCJPv4ZFfPh/EHlfPWNlU1R8bz0e59pNots35x95++kD6q/whz6f3hJ2qbwRP6xEuah6z/5HdmrzwuR92n4vDX3/FneN1Twif2S+pZWSHMlh1JFv+aa/oIEd31zRts3/KcJen9wqX/ywiMhN3853CllGbgI32tZsvI/4juN/4I8y8Py2wwiJNwyDS4O6wRp7z5Zv40cSrXcLskXxaW8Of8fxL774QLoJ+0g8+PYPOm4INmZ/+4QJ4JL6hg7YQ9Z0HmbvxRXjk5TrR7+bL+qtr3eruFH7gsECXu4CbX33RywH3AC/q//4srvnxtb+woQwPr8eaRh6mzEx9beSuouj78x16g5Cqdrnvur1U5e40sheYZWcbVXpn35d3Bryeq/O+K/UkXffgxGo5oLbNBK3EKakmL/40rjpk6nV9Lwg9NoqFcpysjHxJFcS9phtMwZ1oVMpyNf/4znTQXu8qbmX/Ht3J7mbdXY1j74+cMgGlPfd8v7qV/gr7vhP/EUuOrzHb6CN9ykb/vf2buYNWp00CwzvyDMaoWszZrpHMdv6BRd88A3J7xu9btCXgkJY3mt+4Jb737bevwR5N52u2wVcxS2aes+3lHwYK95b2sXIuQ4GiLo3x2lP4zx8vs373jOzprJFm0oO/grO8g/dXjLyp9hdZ7NhmX1ePbG77CXQE8H55FT70nBT42+/cdGrfmnHAS8O73n/cZpyiuy/f4q6K76qisEREUPB8FnaTzxhYyBt/d03YEUkZv7ca3Emu4ZlYa+dREb48zuJO95R9oZRKdNZ/uCup/yr6e7D4I9397ghLu57alTjM8XuG4NZsdRO8aKjdNCZYJxDzqHCQUXmfue5j8vCf1uQ27/BUJM3Of/hPx3nYGuWPXR4IZWP3WPsh9fuYM939V7rEXfZlo7ztU/k+vXkBJe+UI+YVFX3+Ezm3c/6sbyfvb+Ez3d3fS9+s3d9n7zItSCon1k30yOQPeqXulBFlX6hEv/WL0r07/Hle7u25u9ZYP3cqnTuXrVjyFe79Zr76+37p3e8v6qvYmbd32RE7veuCG99QlzgpiHmfEvo78OYJuZbdyYk5caef37ySh9+lJFE6yEffkm7vrd9dYRLu2u3Ifva9dHRd32+KihF7UJPh+mWm6qCSv71wge93vO+nCS2X9x978S4GV2SL4X79PyDC6urHkiVWn6HXWnal2W7/Xr2spvuI3fy+nOlJe78UU276a2t/ijXfcoWHdf467u7z/d3CXkw66zp5dhHd4WTLeTDHKqwD+nIeX+sXfV9+bWTrd9/JpK5Hd+T15ssiCJCjdkPN970r+sKF+XvBOQO1O+azZBuvCYksfd+y68pd3tROuirapk+T5MLZI8VueBGrdmbb8kIUtJ7vlzyeyyn3eT7W33ye2m9J0736khrJEZSRnamp1ieCHy5M3iLPnyvh74yAAAAS7QZpAFfBX4JB2aiphl/3PLJM88d983d15sfZ5DPi7wh8bAR2l6deveunSrKr2xksr+HxfEHpP1rOT089WV8bwO1GjSSLUrthT2WBmhTObGB5IiqKTSCqAg57RWf9F+vaBBfSnO8OxYEBVu/Z14tmb7uF/j+nLFk3c7BXhHgZUoqMLr2eMPcgnvfhY3+7ln8XerXw9KC5fyXLFkvd3uFl9gsEXuK3P7bwzR0x5Y226HzSLeYmzV70mBO7mEghGkllCyKjcv8O4OYu0Q4Rd4UnX4x36CBdi2NvBieecJ8dOFtCZUCalkz3cCW6gYV7Wh5yp1jskdzOUKySzFaSXe76SrCN4+eOG28f0lf1k3vVe828v+V5dySwn4o0OrgDqyjZ/P3ujeB9vqThP+0a/BKrnV7eNIrp9nHxsSkrVGQ6k6fR51QKJhqH9QvyllFq7KWF6+xpR26e5nSlPo/xE2z0w3eI5eh1WwXcmDX/SWWc9YGNCy8r066cENMmk6f77ocXax0VpnlOLXi9BfFV8F
FN4J9ib6aRx4eHakr5N38m0qJgqJe5fSPW6Ib/D6IRYZMj2orDLSpqGPEPsDdpbg3TEVJTtcszLYec2kEvBQSXzRe2UpI7/BVI/v/uQNu9K/2T6rdpwW3t8n+yekr+RBet785LA6n6vL/HPWruNrij9E7NAxmgen5nThB0c070K4O4VoOLP/r8LfnXAV9dz5vfcio+zPlTiWDfy/7uCIuNR5vr/y96r7EkyemmWVU4UIUsRVKH/WhkbuQfG8dy8o/2q1G+07sg5Lu+5jj2VbITvW/uGpPv9VkhMbzSB1+FnGEfKOLz7dbfThoiS+PonX6xB61cJlmGM/90fx5sfFpRSR2MH3hiJaSIH0xNhH5TqaWQ2j9S+k2Nu8++y17srKWORfyNe3WWCMQW079uuJhLnElu93vrKe9/fR2YUcLb6wSncYEj2j65/fte7Jdi9t7mov6IV7uEfBIaf3i8sYd3d8jdv3Kx2Lf5ivd+l2LYQz3u+73t8sSLe97+6EHSn6lvdmkrWIJCHznt3fZYIu79t0uEfV9/QUKfZ+Xvuf7purn57X4m95I76OgUld/N9p1MiHVhMjvd7+MVeCG95al7k7urKwjeGly7fdevvJ7aXSqK7l70T7VWnd7hHx1W9x6nk/WW2M27j+J/dvb3d3l982cce3P6Vqln2Xb1c3jphb9jYgj3vvo8tt3yfq6+W+8vq9L6RPJetmvfShIs/eyYcf5PXW7oQW95fcJeYQfy/fuCS98tfZRe4bv30eKyv8tH197z4ZV20iqXe9Un6pP9IhXvVKCcVd94O6gqZP6bUnBNxwWt7pz0JeQsJeer8+ECcuTwxmqUzm2T68hfE2qs+nBKeUcRnYWzva5Ikrvvf192Ld/XuifsbZn3vll300ESDZhgPgm5n58a7hTyT/PgY9fpAqNjfE3XlaB7393d3Km7JxQl3P+96uVLzf9J0Jlu18nV4kmq6Gb3lApYXTfu/kwr5LlybfhA0OL7fPnMbt35Ym+U+8X/yaXkmEu96W8niHPsp/rhfyEjuX+I43E9ZOHNNKs3ES23PIXar8EvIRHeLDmcVvJIWP1HL4LIAAARbQZpgFfBbuUdNhf17l4xV/F+G/V3DK9wQah7eVr1Ag+D3p2qeO/4dSwi8YScN2YhxJag/cIQnWvpvbAaE2wtj4Onw/dzJ7u0P652V95fk9q37LBRPC8o4zOL97F0WrqYkNQ5R4V35UFzllbGQnPFbf7ZDd71cE2753RX+II1fhq+LCte/sFhguAKk6p7neGhchT1AlersF7X42RfvzHhfGraJvP2I/y/NAJzy7dxbIfunL3LptKW4bAtdRm+rDa54qgtMxc2n5PbSO9XGFurG49UXnxhPfcAn3U+H86Bxoe5H+n5Pe3p8PUxJSaPANN7tFg7DShD5bUlbS8odS+WIdwn3/XTi5wehLRWokmpB9EwS5Pvc/yR2G9D7gh7TSK9NHlha8qC7edJev9Xta+gnktFvH6v5eGUp2MJv7BOKFYJH0k+vd4y62nZfa6bGG3/xC6xqe+qgiCrJFwxKN8nqpbvQKS4iq0NOeP3uUQnfL6IXyMdd4Iud9hosm4+d6p+l68n67fgkICfyxl1j++9oIy97u/lhPwl5pkN9eWEeOE5y+aK4rvv7jcIocw0uEb/ItzO34bhuXqslP/BVhCpn21Y8oloPAvWsKLL/vgow6S8oRqBgllxY8bP1q2xrGW8wVmGhw7zCQdofVFi3Fr+Bf0djOvQVq/BYXc6RFLb3e78OseSYlO+7T8NxL6sk+1pKCmH21UeW7i5RzjZkYNR3PwR0INu3enDhLw7l6Vv37otfHEHay+sqrXgnMZcKtZ0KLo6JJxly8uN76R+3xiCJJ1kvLEvuBde7669+nrwRiWrvW9a6LBHHammv20tQUEDu3CHXnfdhJF/tr7O+4R9ky667Ym2/d9EoVhr3qsQV37319VVP1ghjgff9t2uEfBEQ/tXay8EJ3P32e
0GxIzKqt1yxX14qOVfDUROK/0b9Jf6zbv19nlLd+shHvpcknd733ffx0IP0UEW621lsgworu93mQR4/l4rvu8vrtuUrv/BJbvSLRaIeL1rFFe73SLnUIbv03fe0nrr7wh3RG2vu+8lfsxIcdj38T8kIea9PfajMdroXRu3Otpzf3Lv6BMcRlwy9b/aYNiS9WUjx90emyYz2kqybT9e0qXr6SBER92bVcEO5KNk2X0kdcp3fCSxI/BIIfLr5PXJ1bW4u/2tETE3f0636Uvl+q0uZ9Vk9U+TsOEzwcqf436/kBMVKUfAY+2q3PqRBLyYyY68N3GQkBI4Rl7jwcJKlR3PL2Neki2uxMuH7R9/fk92xM1/pF7fXaQevuYBpkqJB2Jw9w33fVf32Rwn5L3HixrvBSR8LuHBLJ/Nmju6fCJ+T2n5opy+SnIT395SrTe3xkppt9r5oQLd+75/v7JcED5+03/fCxfvNNHiHu+7st+mJ5JXv6lK09KlNzW9LeL3T8c/eI/kjjzMPdzvsiIcM5ESSBJn88iCs/pdI+o2Er45/irz7XWCqcnvx9+nzxbeIwWQAAASQQZqAFfBX4sdm65M/LaNiW/LySO75i4Zy1hgv/lizXNICPlerc+d8c9cv9u4uk4YZuRrgchWkYuZJ9JOXufrG+/yf3+J9iYKNzD3KDxlU0S4qa9wTF3LDkOkthuLx5YJb12+i5S/7TwsvcaIveOxh3hI/vcEg4WwkTxWe/WFm3AaF+Hpj5fLT3G0cP1X4XFxBtRVNyjssnhQLPm4Ba2u5ZZcvQ3YozyPcNH+yaT+I7otYTpbhF3aGEK4rW29ZxaT9XVvGFE3UOP1w+51HmS0EV/vDJ8PCD8/cbCXstHUAG2/Jqt26HbwjbT0Z4eHR6g3CTEVvO0OzWh/9mlA3QjzZ8whYYQvnOlfG7omE7Q8ixE2La0KV8ORbnjrI5cG4h20zVn6p3BTSAI99v/m/V+A/ziQ9pb3RX+M75vMFRsSHmC7m3Bjk3cS37/CGoZXkfxm9xvJ+vsZd3d4r26bzOf7jc+SOd95e5q8ELopeOxaLVX/5Z2zH5rCZf3ywiKFbisfKzhdnBWr4ttYvvChrZrZzoN02sW5Benm+Bmhey5bzJ6pFe7hQs182d3YiOf8SldkSbKJbcgreHYZ6+T9VvcEd5vhh/WqzwgWH28MQ8bxl3uQPGfv9OTckfX0/Ynk/br/J+70koLCLUqFQIqp6cOspoXfEhbWudr7DEoat36Xjufhe4aRGX9cnReE37gpI9Le3ef9a3oT8JdPvcPY99VR3k9KqyXKQoLlC/mRbaY6CTPwXHMDp4Z7+9xVmP19Xfste6XpVfqhBOeVNDVe6doE3PT8PYX2Qp2CE4QbKvvpsXcgoa7gu3CZRwdmlvW+M2afFIKG8ZcOle+pH57Zwo9Ew25RLxK3nLegZ233gjJe+PzQRnyvDCVlFl/2su7/rWtcYZ3vhWW21n7ku/aFHROf3L5eEeikvdLyyiXyO3emsi/BLY0YOD3Zs4MU3WT0s38EQq98xvFJcv/ui1q8b7G9v+/8v/yQj5iXv3R92/xIle+RYzkT0/LeybMv6/BEbd66F+iRZbve/5DPe21RUCEsaEj9aaFrEXd779oRvfdwk/cVe97u9a3BId9SpuvNe79xJKV+f/CO973u7ptVk5Y7W7Nmf9Qlffd/sST+EPIa7f4TJKpgmPr1s699ZW0OFlYvPMws+8V9CWiGTrLIF6e1tdUmeO6T7u730Wby5sbLfotHgku+VPp3u+1ye2nqZ4WNlycDHJfWo/L/+ET7uOTvbn98JeCQRu7ZPqk78EhS5dyi7L83rIe79fW93fm9U93fVqW7feS4RIeHu7d4EfWX2V2RiNzLW94TL+3WO4LuTD/LLOfbv8Xc0XAXoqnL54SYu0ifs/yRJ+awRtFK3Eu8iF5+3d3eT7XEFEu1TDTk2bdy7eQuQhingGq/SxnCnk41m9XWlghM+W
Iu3EnpbaWX05BaV7dkjfeTy/1vySYB9tkjhcvqn48iRzuXTr3DzmPpnhWTi8r7mvaSJ9J/4sr6n/y+Xk5fPvNGFP7uY/33L9y5epE81yX9lAG27dTthH4ZxEggkDU1qIKfukweSZbwWQAAABOdBmqAV8FfmHZqEy/L9/zbojTr3F3a92jD4Y8Emrs0U/gr81II/G9Jx1lTepfvwl3ZS17Yf46YeaWGhdEhJ/Rf97LJE376N50rS7hKw5NHZvDDmyerZ6n2Ot4USs1qFv5xerehuUWVg3PMgKifazC9Ram1BNrvfV+EyFFuQLSjp2GPk/fXxh8eP7qb+oJdWp932mdnku7/ibBMivRvit1/LeeEKeY3DDhL/5YIMEdZep5pWSKht0vZwWlyKUYbDLF1f7hT9fjVABKvUalWJx08dDOaeYu11Q9mjd1rwn/+pPrY4t87HWBukz3ix/usreuo3b/D+iqye0p6WoU+6NnVrfe8SrPnr4x5I4E3s+T+ilVm4yMix24BIkffELvAw8HUo21pnDNJgG8ivuEcsSwTM1DOz1AlZgc48cM/w6UeYsFPpoal6eRGeRq/SVGzN85pO9a4z8bVHvDWiQ8x+pnU8WwmeEbYSfvudY7cNwXRvTDy/Bb4Py71vh/XmMBcaDV+nH6llfxnpl9rrEEu/P8v/2U8yAC70d39TCb+wTjguq+pCdPBteMW0kHcP7re02FDWmXSsXEH/+ahd/M2d/nvm3/HbGnW01X4K8LPltuGc6DIjNTZ/Krzl2/BGfDDDu5XiuIhoiF937oqfTed9YLcV5CxC747aVTPJ7pl204LDXe8oWcNy3TB1/qx76oI5YvY7jwykU9VWz1YjcI+Qi03+a42KG0rJ+k6lbghxspSTtJFbrn3vh/kkrSU7z68LSBu+zTDVuP5P2txrLLWYF9FgrKRO5cKC97nEP2tX/BHzpnbVerapMqBKRncxN74X2504JtmbQdYCNXhgKOi8QUod3ndKG9YXEGNkWyoSk6HLBbN3/3iSvFL28PYep8UhRsM2vaHkUu3hb4/mpLUUJ3fGTFXcERHlXzdjUCgsfZF7SV6eqq1vk+1bOSkimLIfY++ThHxxW7G63y99VlO9+7ye//gkFHC8gdfD8EIkw2n+VR4vu7v/BT2GNnh3ujornqfThCVj7vTkul0gWXHW/3ve+WT1rfx2543vt72kW4J8vp3d3a6bKJe8I0SEBjnx7csHt9+TNR/VdfYt+nWpRZWOEcyGQuwD5G2K7puHawKvvXS/Yt12Et73v3usvab5m73yfW71iZiz77yf0MvlT3d5Pq8pJBPJ70f6m3uEtsKEdXTauiJU93yBZ3u3aWUTdD21nknNgk2btjX0uZDtvu/cPwaXk+qy5bBP5dlYs0qvf9wVXQ72iHOehR60/qJlEe401u9qeoiVA+WjsmvfYuOI97nhD6N35Pvl3UcTT0Jx0hdiGnbk4LBNy7u/Az/PQl2CcU3P77vvzwiV0T09d0t0Qr36cvmjsXJu/dPzx+ykN66+zrd1ghnzim7/JBII4yhJ3f5bN43CRfr8hYaHD+gkS3XnhJ7SV/hC97n4rTJX7t9pdGeT+xnWyicr6r7TNyb0IqmPL2pXQSICb9eaBtyzcd3fksSglwvhPyCDwzs32WKMUUdb258Sp/Fi8t7yv2LXkKST9NJfdDOE7aelvXTkonvyII4wJ/cvueGh+rpulfCvSptaWCEjoncwipXu0Czmc6V3YjCV050eyxvvfr1ku/J66RGtAuK+aSO6aNfUlI+M8M5JCGpfxEJ8mT56/fLj8R+oe+NgAAAFIkGawBXwV+CQdHHIbJeUVeWek15f/cX45Bdv8XkLmG/l/bhgv/uGMthm64AxPSrbXhDKJv7WLeNf/uFITp4rzw3fISr8FVflPDolI4q5FT7h25pBG8m5YbNF7XFpUpVwUimyIf5f38Xvc4hAC9U3bvrl7p9wmTdvLgdg2XxxXc4vFd9JH
cf7PLfbk/f0s1k0ihZfYRNH5x20Evd3KZeWz5L5brY28g1terfaPaTH9XDj23tfGBA08+9q29Q1EH8LVCrFbieqGHf3CJgIb/KshyVHkmMOOdY/r6GFr6bhHjyFrAtdbiKwaXo5ZPlx4wWPIgvy68IZQExL/cbSMNzQgT1qmf1UorUesEIRxgn8MxWWglH/zLNNGx1E4/cKYe3ZhbUbgzTD/C2Vu5bVwxcbsm28jfjLkDWN4EN+B93n/fn+OA5vyfVZb5p0CCd+SCbh6T+N4747gXjJ62VeeK7vu3e+H7orl974CIa44VL12w1H8v/WW42r2Ey/vlhEUKN3fjKn6vxbV+l96dxhD9qhIvsQpFg7tRl/5n7M/HS3Bc3ZpG2i0npKV7pB2Tt8fdXoEW68L9tOrZI9EzhdQj+a/h3Uvyeu/qLLhK4uXu+iwSyTyx+WJ0k3ye9NVZxJDu96vOgWCO8MpP3pvYOhl0oUvcI33zaGETuYP7hLzSa5+mev8FVNGta93n7yNPjgaVNd4U73DS1BvQ72dnueF0xe5U6wRR4HNfR098v/bgpJ5w1MP90MxFoXlAMGk3FsJdyIRqTUvvrboExz1e4I+3/Xe52Nj8k7MyZF1tObnXq/6LJ3fXvI2hxONifkvPLYQ/+oK+2g7GbJZ3QbiInd4S8uja/h5Re+EfCmXxk4P7Iv/fy/1rJ9+MDAIt797bvv0hRoZlIFUT+FrY6UtwTlwn8Ct4eTJH1t/Chso5eXTJx4RfMDGd7ttRefx9K7o8Ex0bfNuF6MMLqe8v4wTG1JFei/UJFfc+fYlE3fvBPcPLke9zBWtNFQiHRFvfEp65ytUrcz//bKd3l87C8IdAkDFXv+bn38p7zhpJ9wTinc4/hys8h5hdLghE1pan/1ghEKyhVgy+/m1bfSlPu/EMEWSV48J+ExDv3l/LBaer7sfXR2xO7qjxWRTHYV9LqzGu/V27+ujd3vto13fL9Lp/IxOfwi/x+eL7u9z976ibbfd+ryetWT/RV1myEZRBk9Ur/6P63Iez/4S3enf8FF7u75Qfu+/x3Td7u5e+EfeRmLdZePIWDuJWHYW4JHxdntY33RRLmMvCl90ndUj6lJuf1vijvu9305SFr/NFTC28x9+6/J/Rvn9CUSlv60peONTAKPeZ5e734iZf48Td+7u7nwJLOE/ycnqk322O3DsJTrG0bHbjJzontUL7xYnctN9yVfShO1Olu/pVPZPruXVmd3eT+nTSoSW7hDf9Hb5NhMvsvUokmK7u73UiRbjOR+nSSyev2qlFoc/02NpgjM95k7UQV9M9fvfKd2vY/1szu/pgkt6aXq1Lb6chHlb9wnkhrwwv4yy+39d44jv9txyczvR09pu6yoIC3A0sS3uV3cvu09RkfEa25vd1vXY/o12973vv7wiSX966byeml5JIIfFZUyeqVaWCEr2nTm/wnvd3fyOFfREuT601VQ93fDA0K2NomG78X22380P8K2VpFs/xnLY+nN9NfughNIUV/S78RL0iEuaTnjDOSFZzKzLyPDL06+O+/xGyCuOedT3ur4yyi4i5/wWQAABQJBmuAV8FfgoGZsCNqLWOSkYqX8nbFy2HZ6jDUeT5q7K/4vObO1hpcI1F4vd+aCNdwuX/3CJLRNQJvZy5rRl3Lt2/X4Qzs3KRKDGg68Ilks7hG/MBgR7Nyia6PhwFucPsEpfL/FXalonY4Cf9r84k/TcrLLG5x4ffG2TJ7F/0CqQHo0WYRT/TCj2xMlpek0RKF37ggIKDEMA+PXuX2QXX4SKaW7v/wpX5bgkQvc3jG6puOOYVHcsL4pJaMMBd36/+7Vb90rW+bD9vS1qNllxAQNEFPB0ebW+1ytrexlgQW6qBZsMb/7ALeD/OLS6YSLn75fdusOxyj08EQ2PaV34bkjpLCVnUaRP/VWVDCuNtmpmfzOw7TfMmiP/p4N4B9yG4dDKGYGqS8P35Yc8uN47j+n+T7Ul/NOedd2m
4JeMhacqd7lTeW/uKxn/eNByv5S5xEI9IE39ihgfFoM7KcC2Ci/NWCvtwJv6yV7TjSZ5GfsOmZUNfVSt2Ggy5sPfTXeaIjbou+iNsOWqTqfvzavW9jZN5/HtW1OEZ9PzN6rZCTd+1j+dUc/anLdAH7md4dq1JH9P/jTq+sJfffWtRrDcF0T+cE/ZvuhJ/M1lFuKpZ2MSy+beNK3a7w/fKEPgGjQ62/Haikrphzc357Jq5b1gs8En1gkzl++nHBKHbSeJghLwo5JVrD/mvZLkmdGp+v+sFkiB+cHB+95vunXV3fk90z6KqBYIzB10aSDvd3B4En0/Piye7TNKfjfdGdjCHOXdFsIRC9wm9of+5tcOziUPwm/bFEft3c03tOqKwWzmx26W7DrK7tk/S23opLcA/+D53YtlzBQw/T4yCMps/+97lx5KT131Ve9Ipd39kysUrVszBKaa/RBT6304MMupskaN67h378J+tfglKSXldLrG66IKhnmWL2tkQKzvO++TERnTKF7ENxnLJ7ddZGKEY5U0WcFOVfuU4R4aT1DYYgof2V3vV42KM73mDt9H/hM7vd9+vuETXBN5KZ6Rg8U73bivv5zHz+E/8v1FG5xl71+CETCRgPFfpEqPJN/rRH17gi0n9pS0uqBd3e94tKpmvZf9ZoSywV3d58u7t7kzTxOqL1T4ISu+zTn2vb61nZf6fNL7wlkjCvhBpoJvXn7y7d7v23u/2Yu77Gs0/10JkNd/fJ9pvvZj8cbJPVS91MR39YjKGofZgO/6Snt1lit7u95PaaFy63d8J7Yy7uXp7vbu7vl9JIllBCdu8oujoE8ien4/IZOkykffWY777EyEfXmn9l8/+T1XdV61RSV2onL7nf9pdAhstz3SYw2CFoz3733d3R9qlwkuXBJGadTuZPryVVWPEevzPu1c6xY1q7rvr6+n60Q4vUUa76yneZuXyqIS8Mnd9z8WX8FZB+nc+BItL5NLGo1ujtMP2p/J3R2WBLclnOviF31w3nz+mjtsJC47lfhGb92CY0V+Xu2ZH03yCyjPJ+pag1G/yetq/s7pevSyCEJNny9PWMdqndd1rTkBUTdz7eCDmnh5d56FPJL31gKf7061xxr1Fu7xoH8Puo7c35PvZZFsPjUDle51ow01G9pjSr5ttyfeKJpWP2ivy5Kdt6W9933l3JH9+0Cgma3n72ct6da7Ut3S8mFfBFivTZfJ/OTXPqfqhHv/CZc+ea5fNfzT/15mUuePZTQxkkIPNH3miCnlnH0UG6/7IeyZgrgAAAE4EGbABXwV+UZzZ8XUyDTVfxcaaym42dufF5dRz2GfFkcZ9AM36lsu+fbCt27QMruMhOul97Moo2gR/Hd1xhC84n/CXCf67wVW6olUFVyoMZ5hQ95YTqr63SLSDxKdI5cJdXlgq8bo7+GpoOBTHgz4VFUn7v6icR/6VJu8vBgV3l+Msd/yyv8IVQIJuzNzaW72ws/sEAgLip77uROXkrRMPIXR2Uv55B7sVtlQUiUA1u4i6fBXq2/1JEFg/fzVdEn+fb+/JaqThdmZDq3uTo+f5xcjnr52ydS/27jNB+EClSy/0T2j43u84n5QKdIxOBvcFMefFO4RIjqePSwU+xGRtHlkjYbBRM6rLFlSH7Buw4wWaYb7rwpKZjRXw/tm5EO+hi6/6kWOmT20nXwluQ7u+38FWd5xNE7iQkfc+xleBzJq3Kg/svfbm75WH673+q3NwReOpfL/vlLKfIaJFICb9xQ4S4KxW4rGyQJetq9hQj7EHZrqDLI/G/0upcJS0W3r8KQxXadnkH++F5gcTYiUddxixnE6+S3al4fLieeKDc4t35S4E+s93dT/1i/Jw86MhPr/0JTmryoFgq39jcgRILmnzP9+Lw72VIlK3wwpoJeCTPxm5uL9ofnx8txvl1pQi0k24LJFIhXZrt+7fHy57KCZZP6/OwU/DKl9Aivql0QKsxzu50djecsk9ythuHfV
HBPufTAkv+P32Tmn+urgffhulPGdGV7jecuOpIzGtMbtuiM9i6EPHd/+vfeHeFtzyvfY1gmn/ge8u6CX+7nR+rz1i/MW7uiekku+FCZmnDG2zXil78NJG6b6fZPas6aTwU+cPvxlNuwRZS+Q1OPMHTtPEkKUXLxxcI+CMIXtPTXbRWGxMgqGcjvDcnpN9PghOxT3URbL8FhjqLAi9F/vl8xXyLyhSWtcpcYNYPT5PbX8VNh5bH3gjLA589Va9tdkITOwsvze4o42u/5fhLyEe/abOr+2lfJ6pErehUqrVt5K9uYr39oTffl+nyfTd5+CIsfY//ZU71IQ8G/e9mQJb33vKEfMIl/2xx3L7u25vc/9X39UCE97zGmyldkvfXVv2oTv3vfbaud10Rrz9Ka7vCPgmK6W27elUn1/LQUvd3ity9qDt9usSW93bnhJ6rrr1XX1mK7N305SXvb47rp3UNRddgX0vWXd9ZNit3vp6xW7vd38VCL9seSIe/hPz1mK8q7e6kyid35kW+/pkMnd6zyxQkr7vDfx4VVkkYIzNp507yzZ7fo18/6YR7vy9Df1rib3NB7/u7v04QNcCN6f+/7vSRu96s8KLc7qitidt93t1GRJXPktvu9t6WkxP1Wb2cgJ7zwmM7opNVk/XonKIO97hLyFm3+CEh/CH+ml9QjJPKySQ75329fMd5Tf0HBsFWpbce8n9MJiMtOX018QLuSWfGvblI+8nroIiKqa735Ize73mz3u+vUt0rUn29FuRgtM+FVI03KnZsJ+Sh4fXM13iDPdvauE77bL5PiWCkb2r7la/ZcFNryJyBrvsX8R6f1ZHzoPqn0+SI3vu+jYV8EW90u/sFRG5obpOXHbCXlTo7FxW5bFeWHX5Iorvzt6SpMEnms6fj82973vyRReTS5/3jC/8L+QhT5t+SIyUk2ozp1/5Igr5z5S728nw98VAAABFRBmyAV8FfixmN5uzE6DhC+Eea82RdE7/l3KtNeXqtoXVqWh8H/Xhkv/lhEkw8PzbgHRJgR5e+ql2pLPL/bti9odiDDg8742+WErVGYLmFKNt3V5f4vx/uwS7GppP11LcE3HcnHby3XpfwgVVSvd+G63L/5Yy5slJY1O5HbZEdqd2Fi/+WMNuO0hF5sX2K3zms13XWPRD1+424vbwVmGWdTZ8u3xM1QmRCH9D5iE1uU6zdry59WvkdpGhQj7Tic2LZE+/txSqD9LBTQwzhJ+0p2KVV7jatB43eMaWK0W3xy3zdkMj3/z7QOUu+YmW2iPHLKm28bT4x8MCa3Cm+gw6ktOGZza1GnH1g+D+5F1Qo4mP9YI5h84h5U3upt3Mdqs8J9zA8fmHgX9wRb18qIN/m5f7QKJb9t3Sv5T3PBxsJl/vbCI4VuKwkcEjOTRWrcrVkLlgpJYTOKYErdnrmF5eOJBkzL6ffuPqetb71sKYH/xL8k/czw9hPvPwEL3L/E/MGY9eqLqW8s9dFlLjQZnC1LSifNclOnX9fWTu8nq3q1mEtPeT0m96TBOKvefAk55oIisZPVb6oFGOH9Ikxc2SC6WfoS8E/nYu9hf4LL7923vrX2Myc7nDpI5A6ZuPFz+6vBFDC/8tC12/pCzXgtesbdn/goLEKmAUvm7/b92y/+4I4ITZN/+OxvvBDHO/3X5n+CLROTHhPoq37gpgLv5yLvv91D+8TYYkrOJ9flF47vCPghFcQwaqLRUzu8bIMgmdLbi0JWmdjWNOilf7ySi/e7TysYZUbUPre/+T3bp22wiQooV5wy0f0awJnsfPxquD+b4UKPij215ir5Jlv1ByNbi3Yq5ZPbv3cQV5a313ihG5zyfeT163wSCdyh2tfgt7uxM8fdW5+C2cM73fW0ipQkZqXgzO80e1XZ97hF8QmCEiqLpt7rVbYJD5ZW2+KQJxHDI334sCcCzY/rdk+3v8ERR1y++LvBKSAY26qy3fvpx/bokglyC+vBIZ367FqTYuEPMaOb8v9Vg
jPbLi6EurcEJ73ll9qif9lDDL/1dL6CQhz/y++gRZ/100Xd30bCHr3qCLu//RFrtPt60+x6hfyCWnfL+V7jyTv7n9nHLd69WJjCn/oktyX6XWIiZDvX/dOXpc0Egi7unG18EXnmkXovn/KyZY271sYbDfnm7hqL23nzIHdXYXx/0MEnjvd30O7R+FHn8I+Q10/xRsvV3v8EZ1l2jvZ7Ljb19Nmk/ulPyfSuX4J8n7lzTqu8h2iikN82q66yGe+k8vf6nuqCPd7vc+nVuVXSKoJsbo7fjOHrTMKLEzsWR7glrWv0a2GCfViyH79eT0l+zSkK3tp7RKb/S9Pf4JyO3l93eZPViUUy+E/II4eX23+KJlAz2FH129KmShKfS77Veq9OWZUvN6kcYX4XXvVif2CXLj84s+RNdm+bDOoieHbXRPrfVX3fms5U9wWQAAAErUGbQBXwWeERW5iQQvAOY8sy/0ZKl5b3hkv/2EdQ7FiIL0Y0WZh4y9pcW0pl/t2hthI2a/5rXDt7ZC9KoxF+wXcG5eGGyfL+/hGgsZYlYdT77vduJhC/SPricC5ZEcwCyerll+K1u+HIszJ+l58gm+nqxe3fuMLd973DEsF0ntRkH/iZqXF8NZR/hZ/YoYJHHd3uX9oKbrgi3JrnNClw6hSDP+6aRQQ/Pj88wXEj2X3yX3CFb72SuE835fPznRiBpNOWQt517X6Sl78qQi/L16gou+jfTMKP3CJBI8UYRT7CT+VnPAt1bbD3pv8rexkNokCH9ZtJ+nNWuBc7dTGPHYyQqUpq/hZueTLqPi3SDbFh/uNhXryNaemKynyt/VPzh0xZ2Uiy1Yzv51IWtXpC2w9XOu1dG1b+2zdAQ+S/CWWBXdWucI14CC8inZs8dPWMESfjuVdYLXraPtWSN/Be4ek+q37GlMVFrXQjkoelsdhtAOE6UQ+j9jIHCeOtwW5aUbwjiuX2nZ2N27hvC3OvgE41XcnuVH9Tc/d1h7Wf9/lu5TnL+VuuX9XwRy6ccqe9kGF3dz98N3+70b3yfpvllQLBT3MGuPtgblMEHc5NI458WX623BZw6l5b9IPotJhPYdIND+rFQm/wVEjePaiGfd3Yu19D+VwfokwiyPPeYz8KePr1wPSmnn9wc8IvQ3uKMcvT4Zi23qgkVg3jwtOE3vupZL9YI4QeO/lUa99a619GPe+knu+T0ktNugRGfY31k/uqzwQ/LnflF5PCL9HCwqTe7u91ZVkhViyou56giFQh9d3x9/wsWUs/1PGYB118MUn/4ICCxD5cIdw42Wel3prDlp5/4kr3IRu/y2ATWH+vJJF/2C0ru8Nsn+7EoJby0l6F6wlMIbu7vtOsImyLlIY1/vQ33yYsTiu8S+EfBCRDlZz/gsvfe739/td9aqCIhdnzL9HevElLHd3fo9CH6wTlMHcw+99duW78npNfXpclfC1XBJd/oRf6NLywgd3dxv12eQycO3+UpNvzoSUEmleObXdU4S/+qInVfsTJJ/v1oQ/m93k9pvqSiR9w99OJ51F0+98EO7vPCT/BWWfzsu2LpXnFbf296rZY3j3voz+WU73369fX17a8hL31gm7vd3c2qr8kIvtR+pX2fvYrdxta7lECXvbZ32LZ6+b6onql4IZTguEnklZ06iy3WXql2KPn16SrE0933vNaJeK/J2uSCox547l7uTPOtl/uiMonl4T8Qa79XvpMKFw8vi3d9gneWw23/d0UPr0HO1zRF3d8/+hh3iu7u598u8n6d542Ql76x93zte73+hG93vvbTyevebRiblDfUTe+5NsJl/XxxHvnt+fx1fQKO4S9LTXMaUZPtqxe/qxZad93k/t1UbKV91Yv2b5NPk/ThPUhHe/sYR7u3CBK393d5cGfXVK/Ytd/f30qy/owl0sLZEqKsv2JXgn3CzVa2KQ/7NkTJpq7CHG1gfaVlxK3DS4Qm8+Upv0oIyk3UraaE6CN3d3Lj93Dj3IxM19336TOW
Rz8/DHgiEJJd29O/gl3JhNVktGl/Jw8y39lPeSav4e+LgAABH9Bm29KQCvgr8wzPM5b4vNj+79y63XlrJkMeiC6/CO0lXAI/qcTS1OK9BJ65cvcsFcF1qbpcw3gKFM9twwbTHR4nmLnHzg438/zR3b/k/pXzzYdIIH5P6fzy95/3LwWwKj19DC1XWs37uj7h67uGl8WpDAfFZZa4bmP/yy7NsKl/8sEBo2MhUpl3YXhN0l66gnrdJj8l15Y2jnLaRuk/91O8fbH5+vR3eswGvbZbTAATGfmR/xKW2Ta7eNWtOunA+tV/NO19/eeht5y0eZR95blx5S2zpaTtsbUk7yDvOcGgg89di/jBDra8Wo9XoZ3fXBbDu2d29Tuvye7fvjOMYpGK53jc+7aAs4u/BJ6YttZtb/psvCN94EWrr+/0htCHYscn01+WEplyL7S9ocyek11uLlY8zp1qSvBDywGrUoawT5NyXTlTpIKdO7n8sGrdG/hJ57Oy2X/awRnc0nJUrEWQm/sUOEuO7iH1uHkJmX206cFJCinTzo5VjsWOvfYmvhfQKbd5qXdv169tRoGJMumeD8OL8QCg+vo7/L6TGoWFWF0W/El03FqLQMdt+CHc17U91Z3vpvcvlYuzfWauvcWIzJnShIuf7UfaPf5PVWttwUZQ0Jn5+1PfF6Ruwgk7Tbb8704R8EhMvuL/J3fuaCNeYD/+COa7d/sxM4WeqsbRW6GwTRwPnwHMs13yoj15PdJZPqzk3btAlxtwfxs/66cZzr/gyVpeoloYf7u4S8Ecvpt3fQIsv+yfutrEIrGqcbIIgx4mOwb2WFqwSv5XJr4QY1+T1vFLLCNBHYDMsQma2nlVsFEkGtcSCx27xpO3CHePq5Zov0Tw9lzskou5Ke8goNa9v6EoFB1MVLrK5g+VZ1dLW3yoSaXvzZ3rkEvQ4R6BGQrKe1VfTe1XMRzMw/sN4LSgfQav73uw+8FViXou8eci088/92bsu7N6axPa24Lbt0hbd3Mel0N/u791ZYjILN13fSS4LLv4ru/0I6L5YSIfpYYczX4vv1q5vpe8EUw/W90/ZOt9Fc3+C2GUjkfveUJ+CSu9b8kFfe3Da4Pvfuy1fqT2zH3dpxLykpXvFaNd++0/Ty/vLJ9Kn31lpO8JPtxV3C6Qbt3eT6vlRxMol9vqCKzvRYtC2LM+Yfz/J/UnjXW9aiu7u93ZYV3L3AzYIunWh6iXIfy7T6jyvvTu94SsSEUKn31ZT5/9dJ1ZfqxfL9nsl6HWlYvPnmX00JQuP3vd7t+7yfdaXmJissJPa00yPE93EstB7Ceowjx1brXG7n1FeT6padQUXd+am7bmuJFu/n+nsn8feXvPeG3c/607L9XWp01RE5r32m5EU17/Z2fhPyGNrcZ6tcFxHKxd81xfiRJSt7tlt9le3y/L6E9JcgLue54cqdPp0hk1p/ZMLLJx8Vz4K90j5wl6OVhHpHyXEKO+XH0VEmn6olMpSQv1gpq+a2DzNktStrHJMOVr6mT/8qJ3frDGaCWicH/6pEtI/glpFrq3P6bWJ4VvB7xIl/KbWv+Iu+6+HvjIAAABLFBm4AV8FfixnNmQfHJXzdSy+WwR6J2XZFeGJ87utP/junz1xpf4YL/5YYJeEpxsI6F9y7hJx+PfUy69QV7mH9XJ2cDkLbSSm6LKwt2QalCSK1NG55c03+qdoVe7wDN8PHmfzJ6TZeW4KNTt3XMmmz1CN5/zt3t27TjCuFxU/97v9wJ8ZFUvKUgjCR7ZQwqX/2wWCoceLCKVujVH0zJiq9xvXxoYz3BVP7H7PACEa5h3nslL+x/R/R6vqyFvVx+tbjxB+vjFgifahp7yinGoLybSP+X/dxkEv3z6+JubeeguXv0GZ8IKnxhv/3d07yThH66+rfbQ2vbHE/tHHwEOv3W4/BeaniU+l79GDVUs8bOepB6dzpf8FE4XHxaHk8q1apBseNJ1btmhKQLseCH49cfSb+Co
rH5+55bnOKZGeMUy/6ThSlk4KZ1aJOYS13fVuN7zR0wy/WWWCnyQjNTYdOvbe+8yb3wh7dx5Datx2Hj8cInv6fwo/cUMEvFYrAIvewx0+tAltm4pf3txsLso0cSalgV9f33hzusw3pHf+SQ+zMtc4f7hIdn+P89ay/t3hQmUiRzLM/WwvIoGqX76ubYib7h1AY+vBHFnyfSVl7iyvwQPmeZ/taoT60NezV+tYMnqnLufVF6r9K1hf42CRSO8DVLzaGBIjPro7NSvy/7VCyW+xdyF7mTrhLwSbpDeMwl/TiCwVYSO1T/3d932kW42vX6kOvuPifrWuB9ho/Vu56/SW4JZYfMNt8vfSXmNk1+SC0t3mAvDcn769yzLGf7GyZ6nv3ghhl1t+qr1XQIpYfL3y/7e6F4dZLVvhHwRRXv79a3lbghLdwxezTPZ4IhEInnt1vtPPC1s0ucLTr+hO5t8Y0/b+CCPiQtuuQXhmJhZDttAq8N0l9PeXI4E744vk9v72xI25wlp72mTmGXllT4qCMqb/un2l0Ccrvn88HfJ96ntpgnEPQ7RF/7asjEhMTunn8I+FyVeP0+9av/wR1rl/VP+FuBpOGXsh+H7Y26Ycff17n1h9xf6/ZX31ojVXpUQkIFd/d6K/wQyaEbjoJ3qMt/gkpunqEtwoR3dsu7lze98HR/Wv+xb32uvVnhmV39eT6r90R1V1RUTL76UEl32hHwRdN+yfWv48rlx2vty28Von1YI+f0+7J8npNiP4KC3fu6UK9QQme9h1Z332J+bvV/oxEr9nRLvMEeRmvfS+Cre93d3dyp+xY+mfhF9lgnGO5/bnUPc/pCRNnlvfo8lmfOT216od1tX3fRvW936U299su7EVHSQWTcsr31dYUEcb7t7M2Z7tuUnCT8oTBPdj+X+8QtpLbEpP1UlG3K+7tFrpdrm01oyrZdpq4ve+5du2sTPB3d3gmwn4TMeFzs2w03Lk9KpVLwnavxCEX7IUXcEA8Z+P1+SLveHF97jvtMvqq6ly+TuIZe709UOM794MnQvvCnk5chxuL+wTkxWYPvbunVamQIRL5ekLxH19VMmkpFNd+9cEfdxXQv4Kp1solPHM8xfKnklvFbrxRWnfzXL6XkmkzWXy1zXnlyf1iPw0XyTfES17JbfJG1XT5HS/avc+3JvkiI1n6pagsgAAAEqkGboBXwV+YZmgvxe0FPsN5bv8vkyvExZC26XVZf/f8We460+bHDBf/sYbi8hY83gkqHgv/pL2q9oFcoKyVJuYBiRvK9wh9DBLzI0xrewYbx/xKvr+fCek3PBJlDDVFqqzcMQyhsXlYqVXElX8MPW57QI+deVPwQF5suQt1UbP3fWrcv+1IEK1msCsdXvu4WL/7hc0J+nEOdvcrC8KOf1l2NjX4uC3K6bEVpnr+e3ZN7Nh8Ey4/VplFNBY/dcx8CWh3/r24YsK6LsV+RSS28cP/jvvuM+ySRJJA9c7+L/YXO+5BV1ulOx4eMh5Kc3K/jayFzHcIrEyNfstbur814TvMp8nrVk+MjOn/ciZJDj9JNkYmlvJ7VNqROWcL71ReFy7pHAyib4CiMQPe3/cdtwG/wZh0SRGWyadCWdL/BES90qr6y/75SxuctgE/FDJfAxLBCOb9vlwuqp8vt7WMnoHUlyB2mOcdo1pnaXEZ/te0SF+Ilf2/tB0k8uWB1v2k3QqE+ofcQhrFudDjlzH1qm4+BZkC0E/oenK2P/+JLqFr/Gd02Jbm5AKafcnlXq8SzHff4RunRYdUXceLe6SzwmTZuPz/eqf3+CQg7yGcWjgJ/wWT5d8N94sC7GGsIYs01iNCb9NaRdTEEumjrwSEsgJO89L8N27hMt58zozl9EQm1dG46FJh/S917dYz0Wzu/XVgiMQfTyWjmuyRtbpDN9tMTvd5l4YwZvy/5fCPgjma96rfo7BFlH7s7FsEpAKrRQ74+RuTn+0+KQrudswVf3hDjERxwRyjDYxPBC
bL3t9xI18kiASqqvBaIu8PRejVnnXqaznl+CU93lxqdXNpykoFghih5bw4bckLhD7w+rUjv7vUJnfeXy8I+yHyOmqXu/xVu5gRff8FsEet1QXHxN7AxvvtRGn1srb+Ofxtf7Az2To6BOWf/GYBrsbERwur3M3TX/ir781a/BDmn4b1ZAW3b97+hLL71fvX9ePsTvlfSd3dbkvvsbBFiO/tpOvdAj3f2rqne+93E3dvu8vuq4Ld7ve/pwi+XGFPj0Ny493uNxlwm/Jus5fchbz6i+osttFvd5f10xRHfu/SfZZJK/eTd3pqzRV3cpJ7vJ7Sal6RLu+9pkVOgj7063+Cch+3viXu4r235YkSeTPLHaexb6KUxHnN6p+rG+/tMJ7vvfd6yWO3SZWC4nDOLISd7a5STCSscI+Yl6f0Ivv5RIluvG1naglLd+6mTzEEXv5/a5GpCp6rrcsJbvp33WmqI1LdkW/zdwsrL77Sha7y9u9+4hz4UuwVkTveK3sIF6sdmBu8W+pNLZpDvvVOQ9XXcunxBDbz+9kyEdpX1JCfh477UfMVZCTRD3WN6/61xpNzVYmVsJ7c+zuEf7te6X38uMPcm/rysPndwxf76nIC2O3RKpMuf63Pyhn6KTN176+5ejRl6e7ve73LlpUoK5X/u7vpNcsgRp3lwlxM06X1K4f66Fwp5JHr/EaIsqj4Glc5k/Ui/Hmxlm/3eO6e6UkFl793fFdMOv2ggd93n6Zf+lBbnz3NqKcn9m+vkfbs7k6vDGqEJw5rvpLJZZ/cFkAAABTtBm8AV8FfixmWzZhyXkuX/3Hea93Lbh5l+yxc9T2zclPnryzRIWCb/PDHmzyJOl8KE48vIJvGeKzR9EdF/6iNtec+QXuFIXdPy3Z/dvCAi/o/iN7+6335lZQEBp28ZvD2dtS5uENuYt3/d1F3PmrbaBFdJ3QUqlLoFG4xdV3GneJNYKL3HZt73KBKeh1+Cgu54XdxWPxuQdlx9uw+SbvBquXtaZOFl9hE0hkIXRqwJHHFbryYslREOGu42tsVkHQreeZkHv5aUj24B2tPHbV7q3yI3bD7Y66iMyZo9xq7KPx404EuShxv/jeFeRHXLYd7ofSzHB+J32cMYm8yaty7YYN3v9fZ8tjGA24nNaw6lCI49NRE7dt6rPG3g9OoXfAlM/4sMHEePits8qefdQUP/Opw+4k6QY/VT2MvuSM54484aiwpQ0oamn6yhb+FJ9puJ04iwdYDfCYu/3Cd6B+GMGvL/7qO3RUPvfe93eT9IvugXab5+9pX8pXmMuE37QoUJHisVhCtRWmYUEPfO+3BSQI3mp+G2kfKoUtWywIl3JHL7ur2mhLKxt9GMo+WaH+Puyq8KbEfU+Qs0rcrvskv+t2dxXYNtYcrY1yHpysmn3/caXLTZ+ETnT+tDI9OhidEJgJv14bU38wYb7MN8b+lvCHj4dSHaK3KFN5PS6fwS+RWnw6lujppstsJnyPS1m+68pLs9VlghK747ZPSv1KgWCrBuHLutx6NdSSO39wp33T3ooJfDQ0cpIYSKHhKnbSzPCXokTH4Ip4XftJK/Tir733k9tMW1yoFnafMPO2CRFe807qXh+L8dodeon3J9+2+CbcP7rwQdZ8xQbeTBL5cfr2qxNe8kSJn++/cl9+4IRDsT30rWFKLDvBkXao90fPwyx5uWsPmEn94R8FIjl5/93bx1X1gqOYTcwrcBC1VZ/r3afvguT0235XBMKnmlR+wcRsCV/zTVkiuUccMa87ff8FO7wlu+8fcy+5l/dcw299ieT3v/243gkJIcMc+ryCONpi/wVCb7vvlY9CPiSZWL3qzvf6Fpe3IMh3p1xc61f8RPEoRyAYgW9Hs5wY5n9p9AiI776t2wXFffl8HqOyqX7dOnt0lKv1Hd3u52Hd38kf4Isv319giET999e4UEu43isQ/0opFnuUbne9uJPaevEQTFRvc7PuRO1RIxk9u1pKyFd3yer074I
iQ6gxH7r6yFd780Ze9xwT93e7vk9u6r2S79pgku/4rwQ+HpJv7IC6Wl33doR6BPJ/k16+gpvd95WHu92Fpvq7v8UurNd78lnz+vFEu999tC73vftoR3e72/lLysmQeSESfqSV+Ce4rEj7gDYlb4z+VZ6iT3f27vve8vk9LPc3BCTd0oVklmGu9lsaxAznt33VF5PST/OjX31Tvek0JIVAlJuATarXvvw46hx8eJ1xf973FcI+Qmr/BOIveVj6kp52Ya7+vdqpt79MEZLu6dJOJ+k9La/S2SmV3feQuJlwpx8I1/xvTJIP0JeKyR5YfwgYveJfEP3nh6i7n73gb8RNjGlEibZ/5Ri+mwmR33v2LhEr7veUSu+0mcmv9YR3dtz527vtMhnvqsR9vL+tyEI5aPe0mJFHFl/ChfJfLFDL3d7vemWIOsMqHd+0ncifxnv8VqtR282vhVa4i9p7bkNXl4nd7pO6X4Ij7ZeKRk/p/wpywG/X35r7ukdZEaC+1d73GKby/78sdu73dKXP7OfGXXwtkgiEQ3+6QOSCXn/ZsJ0uvyXmb5Igt3vv9w+98nw98TAAAEiEGb4BXwV+LGalvATP0ZDqyv3LhqOd+L5dGY9oN5aqwx5t1y/+4LCLIlWUUhN18Nzi9whBGOyi/pvYRQIZOCCCTz2dN7McF+5shIj+qcrCHd73mbG3BJ6uW5bgjoIpXMNlRVosTXRBmIeOK4AhqtyyzcrL+1VkK98v+7wsX/yxRpSc4dP5+2NnlenrQX/rfmAyyI8pS1JYCVaIKReFoHWudX16d0dZNw3GK7hVSd/xzyLhLwwt4//cbWFrct+ik3bIOIi+Lx8V/R07EV7ri2E4bkHpJJV/Htf4yJtQGF/0Rg0Hn5gG1xpU3cN3YC/xdQxKN+RM6bAL8aU/k6k9iQ0GOk9u0s8Hwrxv24plJhUmOv9OH+J13nhBWOxlkFwR73U8l7/GOvIzSXQzeE3Ua1aNfMuB1/B96T0mk+T6vkaPFXcIn3RVh14oLYOS/9Zef4TftgnGCQcEjgo7FGHUsJyH2V2NvrCkVhlZQanb9zmRkoU/AVlbCoTFNR/sPW0pD6y+1fQ0hzcJKfax/S5J7bj5ApeqRrc9QvhtWk96CxhTVk7J/iywx0ma0f3t/gt5NzkkFFjPwTF55XHrGK4DKVZ/ThC9z5ec0YFvb2t+73CJoT+4dwvl46LF2DDkH+4LLhl7ZAuCb684Sxe/2siX2dEHxZbLxoS9Ey1p4IcvnjW0junZbzn6Lnp37hwlO68PW7+rxbHlx2xvlKlCmVGUf+WXRnfPda/WreuC3h6c/5V3bc6aZIsvvS2N03CVGHIR1E/X3dHw7xwwCW5Sd3slw5NW+SPk9733BFYFc4dY/BHckzhd4bb8QNbeUP7uvEiE5l5Fe/wSl3fjedZPrp/V8npq56mgvNuR93MHVrPD/YIzvfUI+EKbKRmmRne63+TP32/xVwxJN2lbHf4d2Ya4Eu5+M7R+Q6yrMC42WT/3vkIQGp9+2j9r6Rm1Sn07xMInfe93funL3ltQh6tWWELu7uKzwz+9Za5z7ZXu9dtAhIit5E7GskYff3qCe+7vdlL1JhRseT6pO/3rq8I+vb5cJ3EsIzzu7paZZT55VeYl78j6P2lrXi77Pd+10VAkvedPzTL8Il/n9lczEsb5rBOYub3uK3p+0YSZv0VAlwyh1uu0tKGlHi1v3d35PVb2j2MvfZ4ju8+el/EXd77/Fbvd3p0moUNw23fdre39vfJVXpjBbvu8sHu76UJ0pRD36OzHd30spJbv1iNF3vJ9aX5iXvrZRic/rqj+vVKZbrxO3LzrW7hPxJnx07u3+Cju+aw4pJP2LiRL3veDvXe6hO+25A1vrv6J9RBJfdkVThTyHng7ynJGktwteSjKOuK20rvrfCF/G8W5lZS6VR7v6LBSXgJ2u8Oxte7vy+zFOfM6V5S3P5l9Fzc1vXqiCm2keoSz78uXRNApLsM
Jrci9uQ5reizf4ylTczDLkEe7V+ikdkm8LPzTGcJrZz9a4KMTulpFwgHVU7gil/Sr+lJ2UsC1S7zMRJIUWtZv5MMaiJIq+1azCnJBdMjvJHLTfTa/BdOSjLKnPPTLkvPPBZAAAFM0GaABXwWeGBWXFdY7IlX45b/KTfqry8uHFwx5tX/G83yg2HotFpQl80gQD+MIaKOf1saM/l12s2dw/D49fZ447IEXB0id9ht1NEM5WBU7YdSNYO1M6aUtyE7JaXwQFKPzlrI5wq/h2LA8tPtSmftsTsbObd7V5u4cHZybmJFTFefbTJTk+ny6w3CNh3u/6V4hp/lhLsnkv+Is5bdar8pW5GRk1CvgsES45n5a9zweaWvvJVW5Y3QpuLMfdHBan0EonGsvVVkzaSnYNZb8etkUvZv12rHK7Pi5UcQURIMFrp2xlwgyvS2qppv9O5tU87akP4/2P34PKjBl89cf9jHwLbBdS4Y1uXVW4zC+vEwpFv131AI7X6Re7tnxxdNPQy93OguaUKyn1bfkjCmTsw4/CtFFj8EqqP61/RIoj77Y60r5syxhFyOtcGHNENwbqQXqx91X3BCR3O/Sq/wh8sHq8C3wsPFflOWT1BuWeE/BAM4EguYExwzxIeO+bfVrgv/63t3X728FJHDd+iefFcdpWD+Jc80X3YfseLvO7b/5PVJvdxsNw3q7JFehtJoupXvDVu+ITC2QzBsYhmKDtf5/8SzkWX+svPQEvzY3VAsLmvBZYJkQ3F4+ip1pzXf0r4f3TvfL6u9AkJCAXMBJp/Df0N7u93sihak9u2dz8OOoRTc+Eieq/4JLkaFFX+zXpAihK9V3V2uPt0+N/OvROPPvBLt/82r2myD7tNr5Nt5PaUv1EwxK4Rsj8vCTSckKGQuNyFx9+fe98bBvYdoKFcpb7hxJaZM4b6yd9PJdfh8P3KNPhxkIgp6JWR/W5LfX9OCPMHb1pPPWtuN4sW9736oSR7u79qW4ow7AXJsCi2vh2l/xu6Do5UjUEpU345yrnNkAmH86vP98KtE/f/ixOanGc8Iv8ForWt7sFWUr76de2ni44UHciPtfaFbzhKXr8I9Ccs70rY/V0vgr5Q7MZOjw0l7buROi1k/SJU/6JyfpfVghLl32q0gkW97w62HwSWb7e4JzTBpGgSQNd8vaBOfP7u95QnWYuHbo+7JlRUt1kGIIaf31ghKvY/7r2nl9Obe7J/X7ku+EspRN7uf1HxeY+Yt36OlzlF/3wUb3d3dhXuXd3rvIWyeq+ywVEz4+73fWT3froEFzhd973fWG4/+ix+Myt2P2632+4U93e7u77vaEfWt+ozRrrfcQ49O9u7y/e+S99J42QpZ7v3e/RYnem+SGvrBFGTw287I7dYu+zb33ib2n3fZCI39pPUI+U8VffkQozuf4lodQ/SvgmF4l+4hwmkTtMmOq+m8SJiD3fyw19bFJ3fX2JQu93u29p+a7v3iCvTd39OC4UX4SvP473OmX0yWYmEn5wutvFEF90rb7TJ0JI3Re0vJe/WYl77E9p6fXpcQTv3iGi71xO7ve4T8FUvV423fP3dzg35YLCueO+acpkOLjhn3omFqyxrBGUbufG6fVrkI8vfWC4orvYh0T+l6xIkdjFJcWzeS3XL8kuaF8S7L8ur2nPvyfeIIEdKCcz33FcY27lkFy/d36s8hfxhHMivPSGhJ4uRY9xy2W+lYkfPP8J+CIRG7nI2/x5OPmP3ScZ8912oJinrka83V+9yJ1+Ijil/y/lx5PaTL5c1vXSiiXvul19TFe7y+tLj7v7uQvtbr4V8NYz3KVs3/6ijM3Mdid/4I7xW6RfNz/pdNCeMPufNp325XdnSliru/dE0TVrJ67r7Ez/DOSIFGjkwgqrNhyfX/qlXchVM+oLIAAAF8EGaIBXwV+LGZiZszcc98ubFDJsO4uPkdOg3Lf17/i+MB9kpKjzQ7Ax5t1y/+2ESVHtNc10rtRV+CjRGDRIwM
wtkvOmmz2wp5iwQvDu9KCL5tz/KJPkJD07kF65fJ3LLGxLt6d7EwtZnDSycPr8EFrjqJ/J/f4nunShYv/ljdMsuQ+Ht7M/pTLSKkrWtiL2UVsN+sblXvjSH7/wyJ6XtAN2ZY2MCiaijPXjMxUHdd+hPNHqA69qbzyRgO91JdQ+638n29buEL8PwWlOhlfUM/Pq+AvznpPf38NcxIM7taD14ddX6aEssYUz7UHKeKhO6UenVZjvSgp3GCtw1K/NC/z8G8RGe6cIUEn69fnXMg5I/wVTwxoNQ+PB0Tpe9x23VHY69KfO7/oZd70ru7kinxslV+LKXmC93lyEy/3tggEBdpb+Q21fZIGqnjmQP5bAje9lB47hN5jaul9btDYJHN2i/Z0IMf/7fu03txl3NjXnOl1zJ29l7/04FXWvc711z/rwr+60vIMCgJ2VQhvHUrR8y86RBgTczZ13cfxV3GmE+mM/c99FPvsVUgNPKqH+/a6v/Sls0WKsqNHuKHbVle5g6m1hEPWcfmfqLtOk/e7VS1MIf7v938aW4isrLrIv80WMweaHsZOS3QrhbtLiQ/HbxwXiraT6T1/L9u9jIJXx7zcDvd3uXVs23GEYr1zG5PSV9zQoVEijNSeHI1sLPcMTvx9+/fHMVpVJ7bYm/ha8UkJ0L9b6R8Onwxmz/LCFzp+ZE7K4dStGjwh5GCvcW3HQNHEnL+xMEd30pk9prL3Gmu90NvNsbUNpJA6vlNODWztYHs4T7opt7Y2Y9fgptP3sS/zfZcizsZ/UUXHamid4Hv4pBNGavQ36hp17hvaZQr2eEl3gqDD3c9cwb7vacyby8PchdIbp+r+63ds7LIZsMPx6v/kW/L/3jNlz8/UsnkD/HCOQlV17mMYFmPH2OtcaXKPvq+X6FZ0SRTE2o68ZufJ741/l4IPR72C23BHiwfXetaxEtjWTpe+tcUM54GM5RN+4U6tAVQ7+QWxWW8gSpZzgT8kuP7HtwiX9egzvLi/dXXQISjYut/sLU5XESlNyhGHzPPk9NsrFvwSinESi+hofy030KvAk3w9fn/D931H3O0MSS5bH0oxS8d+5XE/7wRiw2tz+WT22/drugTGCN92Wtp1kp+ov19Ir7SegSGAh+w//8wsnqqxboJGPL6cvr1wh0yDNP/Lk9a88wl97/6NIInDcdJunBN1AZ9wXc0mhXP22CKNlo1NgatjeK2CEo8I0UeuiURjb25Cnp9jUE8hKdt77xd3d7v04Jc4Otu9+2mp0TL+u4QhF7eFLvveOsuwk/3Fct26OUtt/Z65zrKVKeGlzb6JtO6fbJ6KF8NYZ73r8d79SE3Xpgove+7/JCFa1l9fx+pOHt3d4dHc3d8oor31p8Uy931hMrs939F9jX9mJNP0dku+ut6yfoluzkBFd/uqhFbePiv3cA7XkiC67tVu7fHCbwbpt/bv+E7WfL31YTu0+SJA70+MiBK358fb9bFJv68nr6v73d89N6VMsWSHurKgoK4zM7BYos/d75jaSzhMWc3X3rehwj4JzZn1w15N+if9Mvdv11WT3SCKF5ZOvyexfvILd31WKJToncv+8nKQbQXcJrrCGflEi2xu1Shl0mS9hHDsFt3u5kHBADfJqk/k9PylzIp77hVuSe7chvel8sby/1pZq+yeuX77KKCU/CB39C/GNC7RcCngi0ZjfLInbf4wkUDHV6Wv9u/VjqityxlzqCsrih+IBtaXe/y7vve7KdO8k7/7KVw2qHz6l3d6/RC2X92kwnmS3NIw7yetSl6jClm+Rve/d5P1S2sXe7MMXT0t+rgiH1ZujuFfRC2T1zEL8PQh7OtTg1W1M0m0N1Vma/P26w8+5f1COnLhbctl8uv/FS++556frvbo0/95KY4obq8WWh3tgwlnf8v9fDGZEIXQjZel+Iz2XLbzEuSymLtTXw98ZAAAAFb0GaQBXwV+LGcda9QfzZ826J/llJkJKQwZ+YhUD68p7jrdhev8ppN
sq9wh6lhYThza72yrLw7bvw+y0vkHheOMY/k9Ki3ywSWQnSmlVU6jvLS9wH/l3XvV55ZxSUPr2hh93d3fobyK5C4W9GKJh+CDl5fNu0aNPjibDc8LJmlSf+WNJDq5kPSK4wJlS983x+XJ/8AjxuR7jM9i7+J0tCudum75Xl3v3+t0XcbNKA0ubnnLbwb6zpbB9mjkEv5n0j7/P5QTkvcLJOf9+4+GIMCwFpFFLohM+cB6ye+bGlDX/GF6RsG+obgx9Q2pEsob6Y1DFULpv47g1gMXBeYy/huDddHT+K5n4R8p/5PTzs68E0/c+ai0RG0Kzp5YQnjfOWvRS3L7/mvf8pcNZMJl/9xQzgExut/yHk/ST7/tjJ+E/H1fCTy1HOzls0JlXn2chex0Sv8P8MpIV7hQibRFq5813xIuUB/3mn84yVjodmHq0u16XX4eDxpZLJhw4ui962fhAqzw77AmOsngRUJzE2OB3CU9qNF/L3BGXK50yfi753vmn7hw0NxF7XUl/fVBT3d9+HUmGvdzC9Ve1oS8Em9z9l/vxGMrtXAIr9wpSUsiaM7wPo9DTtgcu+EPnh7xm1Lx1JCW/vkPx3fYXj5cN7bh4mLk1q970D2Y2G3KzvQLnwYzWBfAvDN+D6vFsaVgslYfl1yB3CPk8yM+XYz5fb3ZJ1DF+d6fVu44C5/xuzAtwtoTj8Zom8Ec7pi5icd10frsehvW2qLh/+EcxIsVIeYZRdHzX17guzhNWGbxUQXgbsfiRKe+Pr38F1LdS+p4IVSTTgnFFnuUZuQop5/wVXtpUYT3vTDiXI3MOzn3RsP5l/5lEiY3uaNocK4R8gosKzs69X9LvHmCVrkb3SpaWXN/0z0k+3ELdRhco+93eG4mD8xk5d/7grN5kA0C+qL7kPy924kkzL7f4KRZfZ33zr+ovv7ghNSu/v1gjOX/+T7pzq3FG4+8DP32/hMvoi95fCL9RNJoZpLf/zXv9kK7/IcEYl7ul7ogyYHQl5qu6ORryz/XuCLCfD63wXYuQ7M8Cov5N3fq+7/BLvcby2BrW38EVOm+oRL/8T5YKbu7veP9W/uDv/rysFHdyR+MeT9vH0HtMsb6Pon2/lW5Q1d+q/qioEXduW2nLBHd3shG9a3k2FNq738vd3mN90eY7t3q1PefH+16ku7vrBJu897ql6PL3ekl8n9O6uFN33fe5BZ+CEZPr3ywlffd5PTbe9oExLlRW8vzGrckV3d3vqaEPFFnyt1e+8UZ4rd3d/QkS97mOBhytJJvEXvfWT188ZG9Nid94Zq+x9c/trXVeK7l/DzsVuvr1gsEXaDNuX3e9jeZplE3uEfFXn2fdZfsnw0Z77fV11xKlj9O0uRAqEvd5tk0fnLZekdU2eEKW92eeMsbaFzMWXnoN1d6Sk9LKOTM930uT0kmvxxd32lz/a9F5VG1PwUXd93pj8Zu932tybwy/3p4kk/dvInaB2H8Jl/t7CZJ/CH+h/Nrfxd79oEb+t2tklEnl3DxD2kusssPz6a5G972ltbSUnf4onl939YT8ERZRe5G1+EzXd51+X8tSCQUnhyUGaX587niRNJc4ws3euWB83lRZ8P7pyzRG4+vz/f+P7vudLH/Uy0V6h6UlePrcZsEy6OMZ7QGH5Fp/J/X1iI7C1L6RF7+T0lroxRMqNdy0+RuVr4V9ENeSCIk7bnvsO8vKS7fe7ufpm/1y/Zbqjo1UntKLv4qu+7o68RCuVlO48zX+3WX9af5ML+S7/JEYbWVcPyQFZp8kRCUzUvnSJvfqP1byE3xWW/q+5D4LIAAAE8EGaYBXwV+YZjUqnd54S4R+A+WzkZQTX3LrdL3LcS4O1X8u5g9hgv/lhImoevnY4xO16hSZES3mzDVfRp7+e+QHhHyuTfcibovESNw07lotgOv5bq8sl36t8swlO/J+/6huYCxvliv/D/3J9v0JuED3dzypZZvy/7WJM9ohp7eFa1KJhl/9wTk0in27eQvcbP
iwPaK6L3j0Eec5Rt66MqBMUeZ6R1ab8KNbvnPXERiuxegbB5bQe5EYnI+X/exu0rYC/9KrXwxJHJ+d5/YRm/vVDIm2+591J2Yd/w7eMVcfzzWT99RPG2Z+DZ+5QubKnv7WZPBSGHKWhxiyjkRW/uMLjMrXGDnM+3Fw3Le6rKQ8okm7WCWdang2TA9AjGPMv76XJ7rl+C7ug7pyjtOFWmeCLd3KmtcJ1dcZsaVVli8mF95wccfCnihHAdU+cFds9RIxeOuOWMnwCb+egR+xxMbNZ/Wdx96fI2nypt7GMYoNtXad2NU3P7/GmWzTW1d/mv8gtpQQdcrvXjYnWliHbM4TCoesHr1mV/J6SlfkQkurH5xfMHQQq7nSek3Wdvk9JK+nFlw+w3vvrfGJ/r6y3ey0XpaJMapHOS16gr5PcEbQcNT6dzhwl6vv1GF3KkFZ73ze70vjb8rb0avrbTR1FuOyL4WeGm17PwCVl+t3i6uHM0PjdivsZG9/jDUoazfh2/uZcXfQZlfKvaFGKAw+vrDi+7UF4IbhcqGLcPcaWtFb2ToXEVD1c6Dd3/HL30mS/kqeEvq/h/hyJeW6F5fXjgbrZG1+OK8yfXK/vcsfYmPxk6DaHQfWE7+kGaUtKTEiU3fLL1ohUTI37hQRDy4RBS1jhepaSSa+ve4gN6EctWbfuC4m1z1kNaA+xIe9jffCPgo3ufPqvBKUifnW6lve+GT0qLfrbQtlYVFGLgjbVFZMWwvzih2sQ0l//BAJLqcvfeasuuUWRiDtGv9L4iqZf8v6+NFMedIGZG6yA/jI1Gk8CD2PMUG9UFi9s/2mfmF5J94LSVD4/hZPMHRShnaV9TqIyi/7FrtIJHhxvJrvpo0Qw+Isr7uCX6jPUJ1rG6f8Ind03d+Xy8I+iSy/ibrTurKESf+0MS7qjspofc9oLpPF1g6wQlDF8f+6wRXj4kHLvrNcENPd8vrTT0T0m3zrene1pwSb36Enlygmu7u7ucP2qhJUWMe7hpJ/+X0tcF13D+Svw7S21L9fX0omzfeXPcmS39OCfUkd3fL3BTu73Tve6EfRO3+CHHV9uxpeqBIW94NrfVp9v0/VAjvk2C9NE/v+EtsFV3u7cETtOj6pHtt2XiRerMacdjf34gju76pxM3hF9VaaoTF7vntl8R1t6nTsX6LU9pVJBEIvNadyeuupGQS94S0kI+OmMKKz8/3feFW4/7t3bjf7K86elP+xOt9dEXeXd366+8vrrid7n+fIR8mNL0vx5N5fW43T902Vm8I+Gy6Segkd39Bc5xe1uzX3rfNl30X5O7Pvvyf1XTRs/5P15JSMJXNr3y8KeCLNudtfjzO+9PhtCb110WUqglfXJk/btcbonr4y1gkLd3FQ3WWpbyF/cZnJ4VJ/X/k/Un8eR9+XY6u+SLtzeka70/4REuQkndTw2e/wnY3MR619DI3q0X1Yfq0rvGfPz17QRmBuIcdLSvDhfUnyeXPw98bAAAAVYQZqAFfBX5RmWxdbmrd6/MTd15T1eGC/+4QNnVKaPHt3i19grruMTfhN4/ZVNPOnuEb2e4JGY/G3V8v2eCXc08uelVvlljg37QyVBJ6tlWXIC297PAmeh2xKq8sIHu04TfWJYSLh5RG68t5tcKeCQ2Nm7SjL/7YXso/OC/4t8cBKyV6mj9btDSPcTYTvPXMS3zIpOjCQQKbunlUqOrsjEf1udL8q123/39/ZgLmHnkSta/HaMTNUIP0LGG/q3AvhmQ5bdAi125+uX9/FeWIblI4+4NesafCDgJjfnYcKr75IjOE3N8OvZv55jLl3+4c8Wpz6BO/f9PyeqX+OnhkZhwhEh3f9bnftfCHqxOl4zBSZ5PVOts8FfKCQo8ggdfE1JvnSpzTnWQmf7fLCN7vn/htFpL1JKdxtbgQTL+ntihngLNMlCvsVv2xhD51CVCw7sZA31d78+YporGxgyE2Q25afP/3NwvKWfgZBPHfc1oLazc7nbn+KcU
1FVP9YvmCgyy0fS/gtKNBodkpcno5bJ6re6l4Haa+y3TjEx1kv0+1rZbv24QEXveVmQCXk/qvwUSKQ82piv3CAcLHKE3+PLO+M7sr7t7+4yumgL1V+P9iNFBP8fwxlLb2srrDcqdcKl6DuUB3uZCDtbW95D0W81kpS+q3Hmq8TFpfg+cHkK+j2ZYV+EeQWTyfAVbyOzsJvCurWsFZbMs/4Sds2tNfBw/pRus8JeHopS4y7/qHqPmmf98cG9on9j/f6UuhIlO3uEr43eKfl/TJcUIy8BVvE5x5K5Prt9ISTsbaLQ9PnyiyeEWRhHwWiC++RlNXV5v7WvoSWNjT5+ve6gm20s3vShbnuHRUCyqxUkjq+iUi8392yLqPUh//jT3fBNURlrMELdY/8Evh7/dJVM5HHNWsv3+NMhEV9wnqvjtm0Cb6zfvcUMrD+jhXQ74dxJv0A5nZLNfbjuno8FYnD8PsyJ927UtH5J69wWEzU+Mi1TffHvxaD+1Fxd/QpGZOqf7g7mJKG+38YRN1fLV3489yKveaaqgSd/FCJl4cp+BfS0VueO70sJn7TBeqm8+hhrhhLymfWT+/xpejuvBTOgg3hZu4vD0G5vAvdYLZCuNm/u/U9cEm7u3R+T0kkt7d99ZC3vJ66/YIo/3E+xv2lZCWVhSK73n8fiNOPW7/9duCGldt/oEfd3qtcPxEqDdKSXoXIJciF+T+38TIaSfr7K3Zv6xWS33vk9J9ckF3d3e+X0CS7y/UJP0h/Aie6f43BJzno8WmV0u3VHbveq1yqifXS0/WtdncJ+Ey2y/J/fPYJzPFd3d33rhITd3LCRXasrVjWSRC+q9d0CraLPNT5cdABpI/Cfk8jeqS+sVd743TtVlBJe89k+k2yueiGo3MX6hA71T5933eEfNVMrKycutQUEe7Ll75Ba5SGFz+X8n/FHdrONP3WdBm+RdZdX+6vd36WzMRveeXr80EWnc4OsJz9/G+8np6RUILk1X7c279RArJ+9F7Du7sJ5/G5tThKNktjh1YZpH4S8OSRrb6nl/HmnhG8tmhe6WXLTsTF934JD6nnbvQk4fn1/2QHVtWak79WL3pQ3Bku2rt7cbSElve9u1xgn5BBL3llM3qxJozfu/+FNsYOLm987Q+79p0qfaHHyg80iBtec+Or7vpdJ0J1RPiJt31dFQ4t73e28vv+F8kQYp+/G16VIsKFenz848N83Cxljz92iiL9b743OXSriCt3vvrHSC9osYlLLk5hb6gkJu528QxxeXC0MavHl/qoYyRBCn75M+SIh7R801Of6/8kQWfFy4j38PfFwAAAEmkGaoBXwV+YYUWqvy4JHrBL/4vKwGmhyeq/KTcwLXiz3Oy0KclQwX/7BOYj5AEUGn3cbHikPcPw+PbBQwMdooxzAJptP/Dsjr1dJlnh/pGH7hLyxsi+xv+e2+r/V+WOzm1bbYmke/8ex/GHM+73rILmjd51fcUblx7HbFhX1TDL/7iuSrhGui1bou9vBATYSn0xtdSgSzYhvrdMMgfNti2+b+xDjg/X/9e3vUyE9Q/lPp8n6b7Z2M5QM4i1JhlcA6JDzdy3u/DwYPYyIwNtYW2wD7Suhmpg7IQifpvUurkC3fQE273f/jStZlL+/VZY5XGs1wofRpYnwksjXQIwatgWB3DXhdsGlei/J9Jau43lHQcZnvvznhP/amn/VOoItymcXje+N5IhuC/eBNuHLvsHn9nuYndYrZk/3vQghEG9vf5uGh37lUv/WU+PSFsJl/8sUM4I68Cjo/axD7WO1vjiCt6ywYh7GcPpO3foL1LRqB0luN001+tuBJuwLjs0mF2orEoZFjnLqFw+H99yf/KjpT+M4+kC6ppf8NSrvtJ4tgqKP+tuX4SO0asBz3Lbrc279YnhB4OH4e3tOs8EmE/tlzg0qmlLor7EkCYi97RF+1LcFl90wS/m6mT1DTQuKfFbe0JebeK/gilhflk+klV3HX3fGXT5qy
FZPSX9RPaCn+EXDu/T6TzxZAn1i/WMzUn73Vb5n7UvReyfpPiagk5l/WT6reRwS334y9v7Qg12qecJFo0/X8I+CItz/uyfWq/5LLDFxL/rvBKKgletNQw9WtPRa8Ugod855sWcP6r8m3Znu2/CBoKVcwrh9Jof4d7n+j9S/BNblxSvgtn4I5n3SZv33WWCe4zDVAowRK1frPkQV698ewv91wmLSxfnCPgrufvRx2svezb31XkPe+i+i9V2CYUGpYdvbyheYHRbrPBDIGk3y66TY1WOOcNve7yp3/BDZvwVRYJe6SdJlxPFtegRb3b4iEX+FO2WL7LApEQP38+gmT477t+tvdW7El3ZIuCW87HvFk93zXUlkdf7lFlx9UXqukK6qwRR2V86Zfbt6X8I+tb1sE97u9/+jvtbTeqvVfWvdeuSylffVZPX/2S7vpWPxbhF/QJx13cVuK3b8Sd5g1UJuQPj0e1CZXsnmb6LJOBbvvp+oIfLE6VWEqT48W/eT6U38xHv9kLe9LePETSvh1ijX5r6UskeV3M3u45fELBf4Tf4JobtnruRqV0ztSRBN2XlOmOpi0P3jC5dzX9RUobeOrtrsbZNle31Zkuvrd4kg493ve79u7lJe99Y7eKyP8dTDYcJe4j/f4KjRlfbY/Tt3G5EgS/5OLvq3gT791qtRrKcgq/nD+j+tUga/HdVR7u/fv167UJXx+cueH1DIkJNrP6+Md8J+QVOYdZeEzXd46+fJ90+mWCETDDsUHeSC1ZNdl99JegRxW+RMn1k9Zjz3hjJYiaVq8twUFxWf5clT16OwmV7880r8kVvJhYbF0Vgk7iHBfbWTHWsuZOyPvF+yk/hjJECFT6f2dH8kRMwGH6nynitmuskcUlu57mvPvgsgAAAE/UGawBXwV+LGc2Y6XL3rcXlntX/MTbDWa17mn7/ynujI4MF/9wRG2h6ZTmvwQa4r4eRNhjwOA5OD1u8PCXx8f3GXvuz4Td5qgjgR4IqSt7E7ZAd299nhCcmf5zUO+q5o21uHcdX7CNfnSgl8e+nfzTe/LCh7mc3Hfcepb1y3ezRaV6Qk2bWVuQfwr6pRv3BPEg8UYkfhKvadhhT5FmX+9xpG7DGRyB6fb62H5CiTBbfM5oWj+y9TZ2Tn0xipfL9rZrrD3lkHJqS/743WllRyO6mvj4rU8Fpqv9kKJZ9LcASPz/62V/yhwoI9NfG/+4U86TgRf1rP8Ev+nfgl8vcWbSRSVeKYpKposIFQFzBu/IvvOSMFPjvIpdULreH73rVnDt/decMoiOO1LsFN093yDxxXFtl+9Jw/hO97kKHUc/Th71eX9ELoVlzxWPk4KF/9wTiuGcEisxXlOrph7YUn5SWG5RqJq/yBaCEdX+chXBclv2h6v9+4H0d3Z0iddwoQO7xjXXXOF+ncaPUi4M56ijF8X93b5c7kXX7tnFpXwnaA/SrhtxFf9Fd19i4gs/nbPS9Nq5vJcntv7QpGnJQk/t3uCsQ7/GilL3wSCseEy/3khQt5YO+l77+aMdpItxeCFsZk/2YFv/Cke8JT+saf/vZcPOxr2fh+C4xhs5X7hFwoMfi42Y9mMOJKYO7yk22Csr74+UqFmLDGnEUKDAW7o0E981st/aTzwRSX/ddeTP4eb4v3Hk0S2nt7beZd4/y/9SsWT2eEfBYYZp+X3x9flp0XkLnqVumh90SVQ/xY12D975PTasa9oMjIdjmeHya+5H9U9glOG3K1avc7n7znTjTU3Uz0JLh7gjHsEP6x0vhfGQ5RKHLTq/6wRi2cCer+L2OsEhJ3tfvcFGORd3e/urnRUCkuCHZzje/e502lqCczhjfNFLsnJ1HUfe2MKlkj8In25/Kx3dlabhHwTU6dPeVZWCHn9+l7rzRBiCUhWaXk+v3xFM6e0qqgvr61f8FxyL+8v4ZPtvFq9e212W9G+qBZOxd333dz8Ed3fvCL9WITdN7zyWd75PrXplFly+f/WE+Z+T/VW
ZP7+x8Iwg0yt3vfd5f11HWpe9ne8m0u/X15PTapS8t7+IjCu73vdK0++sEQi9voTfagnve9z9tfkLe61Ld/r6qnHY31k7vr7TfH5OrvFFu937rtmuX99bveEi/62CeK7d4h93/EnQcklI7w0+/9bkl9aoS+s279iYLt3y5p1rLO6+8Vofe93vSd5Peut+nIR4UQzularsp/TwwthNcuCa90Y6mPc92NL0djjybMHpZ3iu1J9Vr/SQqyfLrteayS/6kvvqiFTP76wny5Rv+/U1yy71xJOTYrEPX4CfYomHXRZz93k+r6sIzbx5dJiTlr93vWQHT9fXVleT9ZRlI0xb3uhOhV773v2Q5Gfck/qzjFPhPogibfuMIWjl62cLpk8+Xd8N/SL7XU+tPPCBb73VhDoKby2+WHiITLza9l7Zr78sX3d77VXyeniNKmbSdrkkpv3lkgijfeXD0LaRNfV608PEfChXiVy0jeWVSq390W0MK2RrcuacwXhZbfBnlevsSwgV77uej0bnmT+lySw7fd1b5DRcGiCiN/5fehGhXdyjPL+STlbDXiCBG3m+WpAslFJ62SWaIiN7+6LqILPeev+5tPeCuAAAAEq0Ga4BXwWeEBXCD3HGFEGVME8G2u7v5yJl+TW14vG2RaepZfylu6hjwWGPpbN5jC2mznMPtRJ6L9qQ3u2EITrVWtPnG9cMbcSwgj/fVbO5YUz8BF+v0t929TjVbVFm6+rI2tvE1hrNtpoM5+vUXA7Se9QQvHef+ETserxM8rfzxevLCJ7ky3cv9nf7NmPv8py5WFS/+2YYP0/yxs+6lFjseCvCVUq6gSdrr0trSky1h6ntv8/eAg9NHl/xnW1j7xNnuvw32rc7C8zpVOJ+Nc1ikJnoe1rhHtSwMD4Zh/XMtIG9HiylCgwd095Lapp7CFk7zAu4/SdLrcq7+lUUj3y+1+bL5AaFC/+2KJwmd/PFGYrDewSMLCKI5YUIKMOuk9xm5NhFblmor8lL1dhK6Tck/e96IN47BTFvWxsJPMOl/lgevYaMH//qhx+/qNzXJerqn1Vfrti29LDj0rw7di1bJwKhpoXk/HuwObus5Rfjm2//Y3M6k7vTI7OznVGmvZQWpIx886CeV52sxRLFUoddLv+sYXIMkKqIh9gatchWZdvvmfpwV9I6eNl1QnpT211Ce7Y0Lt9+tE9KsqdzF5rk/SX12pVQ0UF3FthZe+7U6gg+VVQJOskx151BEXhLzWW8v9+CKftHnv/uOpUMNGvJ8Xf5hTw1yf4KrDltvP4sjr7z37altAqIhSeuCX9fjzPyVmC77VyQ7ejvfdaF/i/qWFudkkj85+3F8FR9zBrhDzDosG/rV42C4lDQ7JvpR+xspcZA+1TQJyHJH8NrvNtkX7VNJjCZt/Ld+UdcE785O7VrhHwWFSvvc/+qeXaKVKW040VGW7QI2v/Fm3t9Fe5F7U0A33X/48t+H2fhpJ/+G5byT2+vxhG+PiQWYaurzLhgq2hf0YI/gX9H0/oT2n8EVLLjDeECNA5nNxkni93tulLd2D8jR6yftLiyFRjHgNo/fW+deEi/+J/lEvf2vZQRQEG/XvP+mH4JhR3ZbkWvh+CUoA93+P07QLefsmb53Ne7IOgz37t3RXOiQQzL0Lh3gi3vB+C4r73ekd/f4IiO92hJ7eMu93d7htaBWcpLry4ftosdr/7Le9P83P3rrqu7EkRO6cEMbX3aEi+ralKCosQwxG/57+9z5FL1BH3fXYuW7+7EFFfllXZVb9Qnve76/FkeX93vd+y/EFLTf+M3u5fL3fvf7Fm3MIeONbtp03vPCX9/BOR3EOXM/xZf3ysSV3Llm6xOLEX2dZO9Ek3frCe73f+JvfhJil3Xr2N9+nxyy+tab3vuyFu7/IIwN3wqrPf7LmBV32EfNXVdlu4aevtelTcfrv/d77U+++ifX+uuRQle8boop4TX4zYo5tHZxoo//M9N3tfCX34VzdwzsNi
2G7u4u0eXDGy/1WT7lKXL9r99Xk+1NtMlluZjp/9QiR7u9x0yTPl/2cn8KLhTHDHcVu+7w3Lbenyx5z4pJWb7/r//mvX3k+3VfKV5J5PSsknel9BEnL8j+XcnpPJlin5Lvd+sLLXBLd3HVhX+kCuvu91v66VyRBSpN6cV9OGfDWb2yIq8mm/zREN38NxXQSmJX6hOTMO7/WCyAAAAUcQZsAFfBX4sZx1rqpTqLxfPzUjui8v/l/i+RUlcqNavy+3C/i/L+TJf/cYTllwi4CFije25h2vwhwpDjRfYJWz/tD/sPbTKxLGWsbj5+4j2nDpHjTAv37it3u5r5P36PcJTpHfHyv5PpWnLTBHZhl6XAYrvxh8cW3d3t0WK4+4PiTPa6b2FvCPCHRzIL+4l9/ZrIKJGHrHcKEcpTPEINm+8aDiyoiDs6KYC//TlQyUpj44710cM3TfxJOqBKtOM8KthTkdufoDKJMSJlV4en78I0OV8p2ACvU1h9ue7c75yetU+43XTOP5SXLgy8t8GUBErkX7fkqj3RL672fX4072BQJnzJIsGu5El0F3mQd6K3SM4InXumpuv/8JQ/3TfTvjOZcSqI6gORvj2jc/+nXGUsJ96TDySHEtoPLPw25twjTXQU3cag6mcVBayumAn/ZlkL+7xSNpHqH96Sn7mF5ipiRhpDy2RlplRLvfVdDY7B3W5Z0u7Y7d8LsJ70ilt4P+k967cu3OrFsJl/8swoUZ+K3/tjJ8WhpARXeB41rXsJ/BNY1uQSA04vSFP2G5Wqefz/CfZ3tBQzVLyjF7O9zGsTQM5PJRfvGvkhsDE4XOv8FBY6G8jt4cTlH8PwSefuy/v4RK8TvlmH0VL2Vyq+v13XgnMX+QWKZT4+X0rboFEqbYb7p4cuVlSn+QmtPBFdG5Dx29wrr8aXka9xbV/yd+T0qK/ctqPxvpwpBEr/IdyTXb5AU7z5s0W9jYwh7CjvvsECHoDf/+UHorye7/eve5D7mv6Jm3/2Xt5PJ7WdFtKYh19Cuq2JunCaUsT6cm95PTvsnHioQjXXennj3Hk7p7FIGBdh68wFec88f37P/hAhRYNYP02Eg7HE6yeBL+LE4bIvY9d3/w5dzBvyzUv93iZiBB54G3vsSgR8Qi3usEJcuBmfa+T0rXTLFEsdzxIhm1qjRcJbieENNifvcvccpYR6BJ0z+qdVYTE5/3d0dgkFFC8Mod14fgvLeZd718n0/sitL/8FBXvd9j8J0n3y5dbu2Y19yFu+2nJRPi15gSW92QkX8srssZnLPfejvp+z+q19q/R/YvIT61fUm9+4Jt3u73hHynTl++8PZfu4l9u7hlsOqzX8n1VZeCsrzItrabzXKLdm4ML75/Uvt/qhiMjbt+4jd7ef7G9LhCCSUpuVPxPl736PHd3d96ZztJdBCJhru5YXu+kh3d3vufOlUzCd7EG0FQOIu/0Lu93c+fxIki95I8Irex4yIcfuxWIfJX9Cb3h6KuJQ71aQv1iCwxoHTG8HXsMcK9S3jInPdgi87S2T7csfv6y+M9TiUaTl/WP3d25xXkRjvtnTtpeixpL3ROBX89A1/z723PsPbT/tfd3MPVWE13Qm7ijcdp2+2XqxRxlBtTbzsPtLoQuv3Jq5dtoar6UEd7x21Zm1k9elkybu95OJ7iWArtz4Ey+pa480v3P3svc2P49gsPl1+y6/LzIU6bRaC0cTmne8RK55/zfgJa1uz8e7k9tu+pRWK3eSV+Iu6Lr3F3HxP+907c/b4g/TKo7b08IyPPdLl7vcKr8IGEvxW88dIkzmkr0n6vtqJL+Ak9yHf6Xuvo/f/v+EROq7vy5J7bf0kQ2TE+Mii3Ty58kpL39QsT+ifxF93JEV38XGfZYxR5YOqzFVfWbqUz82fJcpJokR5cJV7f7OW4W4YyQSiJt07k9COl5JCh96/1cZj3x/4e+MgAAAE2kGbIBXwV+YZcPy4SWWJXuWCD8jl7O/P9cXcXaOb2
rv8xHovcXd+e/5T3KD3QDBf/sJGw/WmCeQue69oIVJf4eg9dyP4bivl/3xne0f7pWhsX13qf4m97HnX+N5ft3KHxkbQ+JFi7rV95Upyxb+t8LV3gUZqFXBUVw3+/q88JHvKLDqReditLEmjunVQ+te9wWL/7mI93vbcbapyC4BneRtbhbCqVohvV7IKjlvfVgr99N62iNmGGGV+xA3xw+YBX43cNqStQpdXCMVbl3Eux9oiIIZ5Rc4X8rlxOOs6/y/veFPjQd8+YKPH7Vp9siD1F0wfvJWpMu4QKvHtwb0tAvHPmDx1nolxbWp+F7xzWEjRHu0WGX3/uJoO9vpwe+c1J7b9NEYKaxQ21ejcM55bOFhor06+9rCPNu7+OqoCp8tcwmX/3DhhWKxWN3Fjfb+9toaQVnt2oOH7mVIkzBMi7OesVJxdcTBfuPpNzp6KwVEPqyPE7aN3zG1LbsZj42TNCzdJ3lpx7S6CB3COew6lh+m3Osy94Blf/8RLVFzaVuEY8d+a+ene+VxFKQ4q9e/5s5Wef2dKXv3BIIhG8yphzzqZff6G7nA3hhxPlL01piaJLWCRuRfTCIMIVM4XpVvq57K5Ywie1hHwvb06d1ywN/6KFObXhJ19/6t/FrzvY3RyYz1I0z8j7n9/hrPv3GPcnpJfuCggTaebec5Aqcua8iBVKF905BK4/HY3sdjWJpFvG43UzrQY0PXbfQvhC+fjBFcP+T028T8WfnwiMpb9QRGunfoqKXBI+pm+WQr73RLQo05JE03ITgRv/NWlqNJ3GGPa0bAVYBzD3u9px/Kt1UU4v/J9pOvGiRKb3O3svCL9Mpqb+3XXeINHS67onm/T0KTrmj9P4U+OXzROPqZfegZbGnFhkafEKUjwRiS+OhyYvWlLzSoN5PStt863Slgrvdykr5yRncvdWyfepa0Y0OrXHH+9aHlDKVot4fy+nzPhHwQ7pr2/WrvoiRa/BIIZucC7BUe5i4XPVPvwgW93e73eT1SL+jFLDf0C0mRru4Jvp00Zr8t7wlljJ/fZvR3e71ZfaX3VfXk/WvaJ3fTS92ezljwo/oE8Ppcnc9Hd0/ucrnz8306xaLPl69EFE49yHpIDeT6X/BD5bPeaKJe93e7q/cJlnXu0WHL930yXnEb/JywhIv3+S7u93i2CbuRAK9wxI1M6ushXf+HraXRQ/F0n70pVJ/pb+7xsFvmeu5mUNeq8TO5z55o67UpH0/X4a4JfHvcD4Vqa/DLaf0hnR41u97d9s7D+EfND8StnzPXYm6k1P397cOjht/wl3e7urH+vSeXTpz/yle+tzTd3vJy4QWenPeEfJh33+fh4733+PNdu8Swnw/vf4KxLRbdu33LLWPBHK3qUmAXp6Tfl6HmW/4SK+4MZ8vR/fqyclX5PVvJ6ye7i9fl8trJJfCg9uRyf4T8Jb1nz9jBDu7vb4cS+C927vL7reJLtO3Ar+el9+xrIV99F9+T0vRgh0UWpt6GWa97+svv5mS935HCy6xEdlWW3KT2l2yxW7n+7N3fQjuy/fWbzXySFkxcM5kCWfpJ1vwzwayJ+FYM/rBrvT4QdiezFWSpcdXu2qy//hm7+5/bPT5JJya4LIAAABZ1Bm0AV8FvmHarL/nuaNb/WSouVvdf5S3OicPBgv/uMNquoanSjkr28XenX2CC8774dzLKiViSXyq5Vzp/3NzAV7aPUs53mYpPLBJ0zvKjhVvibvlNlxE6SoPBk96E1cTBSe93FdyvuZovtVmRjO7wr4JOHUBmLmy/+WPl5/7hBZDga66uzet8aR+YOHxS/srfodTTPqGDFASLnRfAyP+z7o3alLxFng2B6wm420f3G4TWUzGnkmIDZwbmSI/qPP+k4K4WDrY/s6e7i0MErEG8bVn/GdJr8bTRfx44Zuz/o8F7xuyijd5gwyYqxLSr5KxvS6csPnMUjcFoso+jbJUZ8LsT4+WfbZN/2uSLyFoCcHQrAxMH0vu+gnCjQM
S3b8ZWj5YKbzYZ4nbmEhjVgW6cZPpJc7LCN50Li6kOeXw7yS/7uDCfP4Yuh5NWM9/8tynEN3CqbCZf/cwgVitxX2xhBWdp+4dwm+9WKiPs/nFtCzeUyqPXGtoJ9dwvfxpA7NHmeCwT0Bj/t9XAuagk4eUbAZiWfXcNY46BFD50zX6ivSP+NOhQ3T856oq8JfpnE2NaD1iXArMkjmFrxcDV/8c38Ic4HMI8hssbyNpK2hN7357vfBMWa4drpEHc4vyV70WjQvk9JMpKssFHIFHj7LZ0fV9lvH39UpaNW96DvXxo2g+70IE+z4WOyr0XRX+EfIneEnp4+uURfbvfL+7VB267mT1xce8NW76z1W/+EKBt5nxmLBYF9z0YptLwm43MuX/3Csz8xCNZlG0yy2rkfb/O5Gaku9evNsn6XndAnJmGyl+dhxJ69q+Ci4Yd42hHBL+XBEHy7Gwkfc1o1ptLZ8k9pPy85FL2v6brDJb3XGdP+M7TcqFUM4/AJd/L9z/H2PxohfOflk8e370db0Kqvs2qb9KEfBYVM/Kx8dpb13/JffRS9Uy2W+0yzxAqPkXo87YXcl94uG5e70+N0+H4ZlvHqcoXgIv1fdy8bG17B2a8uh58tqWv9P4QONHX8xZJ794ry4f7p030yZK+WCkvLl7joldmT11gjhxWtS69snptYklpCiW5zKaHftFe+C4oE7+RfbU18fp0Iv4gE0OdnbeW0/EqWN0r8Iau5/XVXXq+rCZ333fdEjP2r/RILhUYEjN+KmMgjd8Q1YKShIv9D+jdiy1x4RIBB/w/794/Mg4h8EJL0X6cFewtPxRThfle99NZK97RKYcVh7v4kug83bu7+mjQb6bJ0Z//KLOovCOVhAU7u7u94+XvJ9U/sgsp4b7vyEVu3CeEXnm776sRu7gkGlb1z/7PBT89HfvvTre4+JC/tLSW/Y2pHm62usKQ0pP5jbt3fuf24k/afKRQjd3e73d/2c/8IehHb9RW73d/ohXv0JeT067N3u/X4pFI761CJ3tXPnZ4aJ7dr+E5Q0+93k9pdvFMTt3d30ldZPSX8khLvl+/4R1JLAv9KJK8POuyVXLGEvJ5VW2yuH357tciDWeEmxKiZf9r10WCKx3creoSvfP+1PxWXNK+/oI7u+UGEe77/FEu90b9P1j938jT33tVxPDi+K7d+lhIvlfO4yX9vp3cz43cfb6of3eivx196oSXlzewWEnqv8Tl/+cJW9b0nuW792RDYTleX3vfVlu76WxHJ60ukMifHaApOlp0a32EvcPaP37EgnMN6d7u47ayGIIBALR31EMH+6Ydi+iX6rWl2xZpUxA7UvN55guXKS76xOtaa9ZShK89Wu9XEE/Zr67EwR+MqW9fwju+5WM8v91cH8KP0ggIHcXhEvdlavt3eusQW1ElQcXrTLN+DQrr8JlvfSel7UwuiayQXcv3eUHRYThSkfStbUpOGPFxn+cPFdxmrVekir0qQhiJ85cu9a4Iu7p1zOGC+SSTqdK94wv+IKSZDhL3fBZAAAAUjQZtgFfBX5hkkz6u0vyy7U5f1tyyEQ4PPEvFyeyBP4z1V9SVQY8OZY1X+Lcv/uCMg8vkgmGX3b8IQnXVPmfh0RBNHDXP2T9NOjz9OeWGbvr5ffJ6qXu4KLjp4ftzqMouxMN6YcXC0qoxOn9VkQROY/Y8/LP3L7k/XJ6CZiztUt+pRJBWb6oaWCpf/lMMfDiTKXy76G28ImlOKGInAtvbmFWMhHr19gG+RD9vdLaClb7fjpH3ZEoJEThjB0dML3+M1pQNDB4LfsFFJOTJS9g0l+Xm8yCKzxkiUmnGXWxF9N3ufxtfBbBNIOxQ42XmH+7jiFD+EpeHykfTR2WECmWhJw6naNz3zavo/mCI+Fo4s0IGkvE8dqVB8eEr9ZLjNkT7vJh3lKe5yfJ9+Xq9Tv3iCO/Lv31+UvPgTL/7YcMKNxRhocpfk6PrdoaQVuKytCXwI/ggyrEzAc
Vc+8p+uEx/inuV5Uvy+/djZ9nXG42Aje7duvyg5FcjjNagAm10v2Jt/DEvcGyl+OiVrK649w2HpiHnfnqG9Khopff7CHHvR2XSimmz9LnQz78Fm/k+q99SC8kIlul3HZuSf++4TcG6Ev/75n696VbQKt33uQB8i50y/rWCsxEttDdhPh+ZTLxfwCCP+HvoTfpgnvU7u++qzwS9Xe5Fb2MKd9KV4U5ZjKd9xnAEfr1/H8+Hpb184sn3eL1gqpIuf6rzIXnBN48wdXtl5ilbsn7fb4K4cQ+G/hreiTW+dwhi9/NuNbgoJxma5xqlWvxYkZgHxhsQot5/wTkOgCP7mt8QhE6fRefOv2yU0D61aq2h5IPkjjhXXCewcMP6fTlE1SK4R9EzV7yj7/chX0xoTfvBUK5aCwbbAvvrLvCFDDMvl2/93+MjF4us+GsREzw2zOptvXgqSlRfpwFl+31Cx7JNkP2G9FF0nqFTmo290+5L8qGrxaZHm7K03XbYs5gMkc5byBkoj2gX33whew1HO6v1RHhw0FduXvDwl7lf+FCh6Tzh9iQ5/f3vL71CPgh8foVKL/91uQ5dv6hMVjsN3cp7+cqjSf//hUjxdgfOgt81VDDtf/bj+IPd8wWd13fTu706LpiNgy/XC579PpwTzpqUx3d3KYhJa4JBFGuDeV2EDy4fNXhobf7eJf7av069+r1ur6/EnPHcdX/f8grd6+hZ3u93p9OJxi6/u9dbp3hHwQ73rfpAnvcbKx7d3GvdEZaseoot3n/7gjpbnvVGc6L16oqfo+l/0VBHe73vH5L0oQu+7vvcI+auqeVuHSkbcduPuIYW5yDA5Rl/91VhK9vd+sJXfP/sWy6ZW+nFHbu9p+sogfp/4q+77qjo10b9iWTL3ulxknwg91O4fn2a612+yu939GV/4T5QhdrXtz97x3KV19m7vrKW9JaiCzNyiVd6rE6dKUgISbunWlE3fLnl9d8JEOv7vCb9QvLcVuK3cVqryw/qED4fe7cMve5x2rxscSHkH597lOOE/rOd+w2rGvf5TsRTz9bk9SEz/pZxDBRve7ylqiyJRfeRAvjpjVmNyVhD+CuzzJ/+rh9J/8KE/fscKUFQq0nP8Ium2Tnxytp8sIleE9v+DDCyu9v++iyFe+qf1otaSsjRCF7iSu97v5nrKbhZZOPIXvaJatpcfiPeWkW/3ekqXpSHnnye1QlakhG9Pn+5/y/q4iJt7on5fupKBNu701pF9nCNs17hjNBEIPm0RPxGaNXmsxmT631V2aHfiJCntZIgsgAAABMpBm4AV8FfhgZnLm+HooiUq/JxhnXuWMFpQm5ov/uUnNfy93DPgku49sC2v0JR7jI/pExN5fd9X28/BB88L6tzwT8YL/UFx+Rsn7v+P5zzvot37iZYXlmNAVp4s9Hvf1eeCi5Qcf+W6VfHnnwO3EPhn93P7+JNkiHblW/EB4W8UTljIXg2dJfftwpPTnRmD3T/5a8RFgFlNO8EH/Xc1rq+6JVqPkwsn0r29jZH7J0KfSjJ+7v+MhBXBQxYfvkMrXzti9LiypkS390bjNhiL99dXqPfHY2w6ecHXl6p8XCP/23/6sJvMVdqSTuPK5F/d+cmcDawVWirGQdmERoubbG3p1J9XlRbXShS+7n+f5aOXC9XlaXfxpeHc7wn5jXgTOtTthQgrG5H/bCPTPBHwrgj6vdxBZ/fNap7ntDZYmHAZG7za8fNnZIfT/OcyMo8tAW+nOJvQdr6+LTvCTdLVtAl2E3uVG7T5PSV/Ih1EvX7/2+W+1J6Vfbgn0mMw+WbSr5QUT637cJlkBs1N311R35ZrY0lBuLi2Fy5P6f6BWbhFzbtS0SBtuwaYDn8vWrG6Ey/lp4Ior7d26bBPAx8LS0lzT03yeqvu4uCP5uv8H2j56BLGZijyRPzgzuJPXvdR3drnqV6G/CfTDzSq8bGYYz16jBohHL4aLlOCMJbikjdQSe2mL5uFy2hsNA180
63Nf9JPQKt52BytMXFdS9ywOmT6p9cTLfxwonm9e6poI2VU42994BE1e+e5tdwSCA80uitvqLYl3uEX+CchO6du6xLWdfhIp/kS3e9e4XFCGg7sA7yYe48Q44Z9YcJ53+nvElQxUn8cF9fk+3Fe8Ybyz1hzz53K6rMsRZwvyRYkEmj9TeywGfJdfdeCQhA278Q6hA77lnhL508WGIRtKZP6vzwQ9yKYtpOocI50DsKlTxr32q4UgL7k/3OhenvvdOtoR6F6He91tMTjOX8Te/JOqOwSCgSPjzuicn3VAhni895Uvch5nX+XKvfuTYij8HqyX35GCm0943T8c1ThqgldPya4SeXYUvd3cKqv13EvLZpy7x6NasEmfVcXtgj7vryJYutTp+C2Gt/9yS7XY1V4Tu73vulZRG+O+3aVYJN77PiIRf2KpR9nORW4rFfdYuizXf2eyu/earvfo+qPVPvFaKR99H9V+/DyST0S739wh7Le/xUuL6nmd/J+lTTKWJ27ZbLHuOuDksrcl7TJbBPd3MHd3TCierRbgjk5Pr/+vrZMt9ftCe7u/osm8Am1WmLVVCN4ycPd7hG/jCPitTR2xpIoSyLZRN7FZ4ivcvu5BcEJd0yWT0kvWUJbEVa7nHv5CXd+/WGfHdVjy/+SxO5nbE9J0T9oyRXuTtVxPhx6W3PwmXxBPwVEXq7it4VbmNNvJPKJuO3Lq1EspJYNPKXTlmPz1RfyVwV3z4GRGl7P4rVOUT97uiQn3dz50vXkJ9BPd3LaL5G47Z+FC+9k4Khhbd3e+fvbafxBzvqzShCMDXm5PtPE8hcn7ZOXJ6+I/gj3udKWUpPyQtkvG1vJcsXKYxWIc3enky4/K9UXop79Tp6j93iGj+731jCtrSu8+Pbe/8M5IIiHj508kRUJ86MWaZvEtWWjv1eH8vwVwAAABOlBm6AV8Fvix251ek/zXPS9e4u58qfLP7iyu/ntwwX/3HG5byAoMWFU1lLVL+94YuekNS9Fx1oVg92uoakv+X97wWcMpwrY2qm+5wdSqX2y+3K2fJ+27n4YojCrrz5yDdw4H49T+T+9csZvfgqwjvgj2P+YFngbO5Tdyf3q0yM4ehnf/gmNx3a4J/3dzC/Kd5shXzCnO9s1/8rbcEEJmVwK9gTA7YB0TdWcJ17MXgTMlYdv6Lt46aF3BTXdw+3d2hlGbYlAacen8vpFwvjHI0NUHvYMZ9c47ztX1WpRGxpOi/aaKx/3GXshCwfUwrbP0yg0830fgh/BCHktubnEJosFPhp/3zFTONfr/vz1Sp1vesUL9NF+02ytU65Zt316/RSl4dcHsJ+KNy+Xwl/85Ywgoz4Kr5NL/vK/QgTr0uiixebck1t9Xn5yMC7/3za94/wpHC7dNpFzu7VJske1irsOQxPzSHNav049j7wZ0dewTrnk5akRqVes/0ldVlYQ+ihHLwZ7hQ6ny3MPQVbS+CHQaCtYWl04QLu72z0SdwB+SCPu0W9+nNlz+JpvjYNm9P8I93vd2CD8P6uX/fChrnru78oUDzgQXzyxBdjmPzDRrmQm9MhQRe56PryodwHaVCy9M0X5V/WMlvd2C/3Gws9hilNbRgMrdR1iTpMUFz1yLxUtFHHma+L9NC2WMkFt3YOVw480EtoLX1N7WzD2Lvl/39avqs/sbKd0vvl+2msFQqi4M4/HefHKTaV316wi/sEJeX5fkurfWSYrnYk9L+5iF46K3l+3F7FChnaRdQ8PYyJdeQ7dbhUqrMGPqop92dRhP/8nr/4eMZgrZZ4f5WoBQ8Je2SkXKIvT+tcScrrHOk8SHTYu4sr3zRvbVYKjOUpXP1e/EJPpxWhawVnz7KBil2bi05Z0dJ4dNm5fpotIKEdk8sMqRt8pVeChJYGOP3BVMETJoIXf5Xy+O0eEfBdvPyfuV6Emqosvl8nr/4gVx8sSJ7/BSUkx8j6TfXBbvPLH0kfYohz8gnd3ekuwkJK9X3me/rRmOsnd+
kKPe931kpo3b28El54+18ZCK8rLd3eT6yfoEZ7lzKiql8nLDpvfeSV4Brvh793J60f9nI5pl3/VdelyQTyknbw7ux9+tf7/y/X0r/QIt4h/Qk9JxU/d3dxW+hL7O96l/ij3fe/cERnv+ZPen/660lfsXEle990+8VSj0/7uEls/k+s6qsIc/ygUV77yfVjEXZYor7yV7yX7Fy3MDZQzdZSkE3fJ605OuT6r/MK3RbFoId3s7vPN939/hrwJklrDbsPbl4dZz3rYIZ9jZlfHZWR0hPUTdxL7dxWK+0inTV4lhK7sPcoL9FW0m69fUIFvefN36vF8n3rlRJCO/tJ93ppIkEd9x2bJ9+JCSqWHFR6W8Je4bOon35oJzCt3d3dzC3fYkW8dn7TkdP94YNLNq93X8urb4hCDyZu/1EktT1ecl/SXuEbu4UV3d398Kv3CZp/LwT99u3rLcaeMZdlbDMNpw5e9rLZcnTgiPur5Pe9bx59p7s7VvWWS9yLcvq4QuTu+vTupjbvvD5b3u7d7ZUrRr/ZOldpPC/hQh7l2nJWNrlsuaRrTN1+uX/yetixmd9wxkglEOpj+StV8ieSIlMmjkkcyy6/fLnkkKSL4LIAAAATZQZvAFfBX4sZxmLyecv5O4vPqsuSfL/7i+HJRiJewLe68tFIMv8t3wwX/ywiTNjKH80HBzSlMem+OX3+wnCd5zuo0ZbCD/v+WCiY+iYBDMtaepY4zOJlyx3V1fDWr6h9XL6TLLBby9hDUjxBx7RPk/b68I7ng/w9BGbJ8YKPr3CJX2d3PBsr76/BGR3yi/KJeCbyrfCpf/KzDj931vQyfN3MPp28PzaoBXpyfs1K/p6syulpttfjPcpJBFw95sjZbY8zYO6BMgtvw1wy8N2SA75f18YUbELaqt8eyl1IQCZ6p4d5RHBwg/UiFk/pVc8IE18IzPr5Xv+xbnLnJd33MD6acsQVtynuf+4KbvjQrUdDzYR8dPvlvyEn7/cl95fEeuE/NlXhP0csKGFdxoFkZlTWH1UEr9AV065r4+6350HilxRwE3/D3CHIR5B3vnFdRV2L9r8JV//pWa/dV3tEuLyBf/d+7zZ9jsItd/xpXq6vcef+90wPXoXPT02P9g44CooWqJax4BxhbSyyjp5mBg8f8b57MmvCDydD9NNvfA3MUq32uSnqtS3T06zxZQg4hOKGGTyD1ruKI+9wi5Sxcvt/goMdnmVJDtL3ZPX+uEvBFtgWG34/rcPaRG/cZDluLvL/7jIR7tZvjq/nDf/el5W19iNklZiRsXAJof73wtaROxmLZgeuGrY/tobcIYhPzabc0iCzmIqi1fGk3PKUB98A2HdW+HPD8nK+fs+r8TDHv64uXr8nJ6110uG/bGmfOudBpBuSb5hovYrhO1GOpb/y/W+U97hHwWbvfd5/i3ziGXdaJ/X5YIr8O4Kg/IICK7rJnQyavBdVEp/D5SL5PyyizXe3cNdt7/VDSP/T+Co3L2lsJV/DaIbyRPIGXaayQTCQ3SumX05G+M3nPPtSKXebx4T96IwvPc5+6mhl6KT9/Epl39AnNuO+dqWHbnNvvBDIH9rjsYoS8FBXttqmqetdfk/kPHwet13hcUH5dvdScq86lZ8kW/4Ipw0ixL5vwS8qIQHY+k/UAp9Plr3ECx+nZYHv+3BGI3d/UJFnIzjuCP5856PDm71Uv/tJVBFtDRX5QfQJO71CL5SNiFdcv4nlgrPLjvwOC6dt6IJXoyN9+y+87XXusXRWCeQ9fu/VuCPu/j9lbIGn95Lu790ZvoEZXnQ7diWE73vvr2tantZLwi/SFbiufLv1WLWvtXeqPuj+qBFu/XcisbaVfrWmsJPK8SU/vG1ux0MnLpeoidI5ENvfJ/VPlKQXJL6iBV7vfX/rpdom9j25t73muQkIXvorOvXBdvT5/aEvVI/gmvu7u77eQqEpBl+XSxWrnZjaQ+6P0uTpdI3L/TUoO/fp/Ydvhp0D5n/Fa+
n6nVcdxhHyYZ6/ovk/ijPFbwomLFpfkqjwXi2aHI1/Gak4qOVV91Z7cXUxnnX6KxBxWze79rUnWTy9F/WyzZl/cpuSJkuyQ5l/wBz7f6zFUVnFJfPn9wnFnn8KE+/snBUKLdisVlyW7uK3SqlVscWeL7tlBZBp75Ppp5qn7LZW3+vv3mJu/GWWtb7xe7uNyXwvmiDOOmXdMkgs4f73cTTe7vdJf1aR4Iz8uHTdJlRr78klLcM6qdPNEYZuF8woPtWbW+O5+CPdG66XPL5pPwWQAAATdQZvgFfBX5hko8N5oc6+EfHGpaVzVmGyf8XZSO442a82Y+rYBjzcwkePwUEKas3nJJJ1Xlh/JiK2MQmXSIOmT6ME2x3lr73KtL6b9gov343zDdy2iv8tyQmXk/afEyxe0Ybe+8gvVueJ5/0yiioAzYmKKX+6lyvcEhJe3mF6lEvNkKeYRlh+EfCS9S1TafJm41e3jSeHLNDLbJk+V6BnAj80/4oAw/TrmGd65vTLVRgo5/aXnhShyINphyg0INJW4zagWd7A7sFjj6K5G1DqgJavn/y7iZHfWZpZC/gVQC5WRFy89xpwBP3Sv9/9/S3Q/oBtKbqj68XTtLHabcHDWHWjq0+PEy5x/+gZ49OsiU1kXHvv/Gk8OX/u3srWeN8L+QYBmVneS032cBnMtT3cJ1x0/1SljL7mCOdIO9d328m4fe2Yfprw8UOXK4LnO2w9GUD8YgJd/yfdk0lhSW0mauF2827sPZYPJdcUyKm1xnP3TcdjKMM0+1Ni2g8w+FK9FgguyG7Fp15jTbtiwV3WVTfqpt+XuG5VmFNsKDj4/RvD0+ceD0UpVq97iLe4q2mQvG20JuHs01LrdMJ/2hfAME3epC1YdNPlHx8DL+thFs8hjNP40qGpT09Og79j72KRUqFkuFyhqlzBdJfvt/jPCD348iZBNO+DyH6LWMvpob4q7y/NsovRh3ZPX2fk9JLvp935kWVXKr/NdiBH9Pz5PVS+nBQYoFOGp1AbgpUslmq3/KEbk8JP8IhB7nXuf799HgutNF/nx2+wjDKVl/HmPkHua/gl30r6s17hHZnUWcoEwzZVCJz2/txLU1t/blg3F3xZPbbf/Vt/0KEvd+Xe2jf1rgqJnTfKwHYcpJdWRPff7O77hHw9nf0SbveuK19VkvdF6fnnsaxQqUjKNXnS21kh89ElSPTSYzy13Q1DP3/k/Sv8UZ5zxxkOX3Q8Enl96+JOOQesE/6JBfHsK/uv1/+CDzY+8NEi6uC/4arn+gpAo2Un30lXP43T3Zt7vnhHwTZcn96bq1Kd99/RKphpLowozX9UCUqkHsJXeE885qev0SCTu7ntfosvw0Tc+LxzvwnlYUu7u97u7vPQ7Nho8Wd74d0tbvvyJErsSUk7QGOq8/SXasVX177wQ3f/7S9k9Uj+lBEVylbpmE39CrwhDm5+K71gjlK2J0Xq83fd9/f1qgC6xZy5o3mNF+xPtSGd71a08HZfk+ltS8FV3e7uf3edMnru67u/k/p/yb3rtYR8lkJe1+ghrdsaBJdq72srtKfKCMrpacdV1SIRPJ6+tSp19Zc/14ruAhquD99q1ifcZLbd3CPmpLMHFooK9t7u7vEOUw0pMgREqOfwyxpZJezlCPnwo65R+zGVB1b91iCW77T2umJK7v032eUz78kMi8MJO4zpPun/UQSXvbv6tSgp1pKnV7WJy97HYxeeFZheoS8hL3v8UR3d3d3rLSEicMOVq1y2+0ikyFzTt3NZxmw+t+tEMLvyf1/TF5e1Sqv0hBbXaqju+sVfc/8K9gqFXvn9lu5p9oKHmVP5R8be6BGvb/T90zu/pP+rEadt3+IX2Eiamvkjo8Vffd5fat/5GXP4WX4TEH7it1f7Zb3Lml28hXa8kcJNVm9veeHzP+GPZFNmsllJWpSZPh74qAAAFmEGaD0pAK+CzwXisNMtcgVVmJV+kMy/lIQPSwZ15f4vnWLHPT8WdS
YeLbP7RVwwX/ywTiOHYITFLiRdx8Pqz+93coqGWPOnLy7tBxypPXEyXt236aLL015ckryf37R4Iy3eLYv1eowiU7uXNO+7ULF/9sORL7v2nPv9xpMhc1gi+Z+tAZ+Pxh+regCVfKab3DCa4dq7zC++DsX0/DLwKL9wsrhQOTT/uCjci2isSGwUWixwGag/zEvqdzx6fX409OwACH+b4LUGm3ynBTtRXYa8CX9gbPpFuO6yVumNFDkkFK8h9ZKfCxNuQKq+5f+MOdLpPOr4W2OlfBbe5eRSYzKfKntjCl1P3zjutE5+UWOUwRvHvnNOD8v+C/JUXAnfVG/+FAKbpvX4J7kp+X/1BLbfsN5xINK66WQvcZx8ZNtgXMrplQ7P9IhL5cjAWRwPkE/MK3Kavdw+QVu8NwfJyU3cmKfa4Jty/zJp6MBT/PyyNP/hTQzOEnmZGU3fEt7rtK1RAODdevxL+uaaRN+5rzbdlY0ub0L2+2dfooffHDilWXDULWNPGnXvd5Pd/8KYS6evYayplR7o9T329VnvzXUwFpU5bgi4Ru1unW+vJPn2VC97ucTKHnu7hQnD0kt22nf0ySTiemZ867b8o/jV3CT+wTjntE8/317QKtlPpjD7blbLzFRmH794RkK0MpPaP4QafXgl+WpTTngl5nf1CX1nYtH6kg9mhLZk/S3fDvZoyXaauIx9l5R5tX2DcI92000Yz1+mN2ycifNrUo6xvCCxNLeHU4XHLk/vaUTLX7MAU0WL0o39n10PLe6i+QWla6+u8qn0CEzYOBzyfNCT9CQkW79S/eSnqrcFpR3vzC4/i7pVpxQyD8jHQ8+V9LikHj5wpfnQCIUn0Yl6o4/jvf+Cwx12NBF8fReKCfOuwNLxObwj34QKgQaX7rJ9rCuii77jsq9DZC3fpRRtw5bsr/1h8/lsEPxg1HsdC+VkatbR8u5sPPcMffBHbKqM3ub3BIaHeBC3FvXBMULC/l9EK9fKJWz/jQiT+tfFR+vj3l/Ry8/y/J/rksEx0ky+33SgnrhcUjlfvuULOwys+V/4KSlGWn3hB6DiXhb5zZy+/Z4ISUSeGr/d4gWceKLHQbnC/vBEIe9u6Flzq3xt0e1ePv8O3vYiru06+N8v4zp2p1MPnz3fLsfp18cUTn8IrcrD4hy5u4VebQ9atMNmvq9uX/2w2Xl6/L30/b1u0Ku0H7v3fgmI5/zjz7nernf3iMsN3+IYJfMgZbvsd9XKC6TvvuwCRfJ1sEW7fL8FJRtr+9yLToJC9sE8+dy5dP++5adRV773+E7u80nf3ZSbfXr0J9YgjjKve7v7y+/4Sone98npJFJ6gn3Kgt3uO0I+P3vi9tPr8RVNU0hmn+He05/cwlbpkugK9vurrJ7dl/l3fvE3e8/vTultQqvJ9vlb/v0+kujXv+Jvsrt/kJPCtq+C6HOnuL8j7IR8UWk3tSZ8lzfvlcE930kTygpdUY86iPi8rTcUR6d75P6/Gumo3hkSNlBsr6P//+qGEjOH438095/SWkCSbeKU1upPG/V2oIr26RS+lrhKHx75SjnNYTL/1ijCuKN4Slpb+ERLvnNHPHtx3iekhrUUYg/e8mIdV7Ohl05/uk6BIR9yppcdFlcgPPDCJ48e2/RIk+XNT+r7EWIvenfCXjcvG1b5fUqpIm6Jwp5Lr3e4KhVnsJZXLbu4rEDQ4QpciHHhjC3+Uffd5t38ioIKVCxXJQ+tUR59jfEziL1JMeIaEOyawYp0Lhsm32Htybk/fz6Nd76fS46Yj6HbtEu935L7vC/kIOytfKvli73c5vfVk3fxnVll9qYstP4k5ArVKsxHDOSQw6v+SSXM10/+IspNvBZAAAAUXQZogFfBYX/3Fisrgmz/OfHoPvS+hZF3dz5hkv/2N7QdaieWOcG+F780Pq+vY1GnXkTDnVIOsyvcbPaS4pKQxBj99aSFfJ3ngY41oQD/Ad49VtDlK7lRWa7aBeTKVMWRartWGO/uFC
7juiV3HcciJp9gw46Eo1Tli8BjKv5YTsQ/pJEvee/J9pliX4fkRRPwSfO/1p5T5STw4/2fDQq/eCOXt6L8v+1gm5CYReZfDamsyZf/SKd5sMDQtlgnCRH3gn2dNSE2IlTZZbVFl/rcVzCx3zSIDlNRpvC/cx+HUncnrR39CSXLeG/7g30C9P5O3q4XW2WNu4rMFLYbEDekw6BJ3k3an5bw/NYSMQvYWOGkHp8exWNPOpz0lmv7jSTORDDpDtTu0errOsa8u5jO7/luhewUPK7pPApviaPvc+HtpsW/4fOrJKa6auOBeOOzBNHjgSdf3/3fZX7jJXQyQyj/eolmtI5/4qcLtXH3XLupdHuncbW+zf5MpaSXqfufPS/T/4S4ZbTvfJ9uW/9WXhF0zQ9J6p/yFjwv9o95YJ8bp4ev2n70SVS+/WCsym9sGX3cORbwLweCkcnBNYffJwk/TBV4rFd3d3t9gquSpxfP9rm5b9vxm680csVm+hyed5JV8OJGPjYaNf+zx7kyfWD/ka+sk/9e4ykU1kL80pQrCNg8IMW8/bfcI3Ym8BLfa7j/QMe99nYI5+gQeuF9trfsTExMVbzv/niddeCIt7j7L764LBG4dl+PHol7+4vqPEnd+eVBuEfDl34P5O67JIVIP6f4XEOHT/G5gtnkbLBDqWP/xxTBay3h78WHffnx3eGt5PfFv8LkDiSRg+wTfVIivicsWWHLWpz/wgV5oZP0kVd30VCk8PLPvJ7f/QTO8wbfmlugRCAj9w/7uJfpW6CB30rb3O3unKgU9y695CeMV8H2CQkdU7HW/wXTL13M92+ZCHmLeO0+H/LKV/J66slr8v6fFidXu71l9iyo4+YfgiGMUZSnj8vwWyo3p+UeeWvcEOiQwofY/Eicbvbdf3ozepS7mpsWllu3EkJu8JeYQ1VO/oFgmkeW5RWCdvdd+IbqbXey7R/J695OCbIGn8m0ztMkshJF/uQo/lXvX4VNFe5x/eP9/9jddAkvf2t9e7a6KQxXe+zoE5Jfd3v/qoRf4J773d378pcfn/y6cvusvr/JvGZzk9Hk1fr9Pe9Pe/TFcuXTvL+lq97hLyZYP/DF78I3bz7bV/l/3lBMc/8/6cfhGnSvkQO/k9vO+3MS76voQLP12qSPrtcy62IPClvLoEtE+5kWUG/KgRXbem/IR9KvJBTB32b/dC/F923Vi8Kgm/s0699b4JD3d0zr8Tu97gk2J1L5V2n0XT8lS3qS+9+m973tfYkgq+73/E+WDnrI6JzCXuGOSOll//IQQ0cVlz4kSjwnJLFGf3pJCzcIrhZsf2JYbPj2+Kba/pt7KR7/hAuem9y8xb92ecVbqvVrL/8gsz5nZ79PyQQcvxunRuG5Fc/F373Ii85Z8KP7HiDabX3W/hV7LGE1z6Fd0+4q5gdHg17vWMVBAb1OW9zuzq/uGycOeqO49fpX+xPJ9JW/J6qlcn7flIo/Vd56tPHnwvmiDR3G2IWGsuS/5qhG93uO942vVEn5fvN+8xd1ryS3SvJ6W1+CIt6SL4Y8hCXnz4jm+Irv3G6XzbL/+IwQ9fxvi3MMXl+r1B/kXxBSlqktpvNIFkAAABPNBmkAV8FfmGbQfPpf/cXtTve9/XuEe7bVd3/LeMTWGS/+4LyZugqhjJuPkrevwScbjMre4y7uHXR2/UoMmEn5f/cXMKkDb2O2bz1k9Jt8vLlL26vxeYGnO+EjD5L+Ey3vel9EKkLF/9lCOob5hl0ZSxEROr2/Okys8aR+fSgtb//srh5mgynNZDbPGAU27r+3jFo4Eel/hJqf0luNvjNbaeWzehET32vxVKFlilAmpcDQ2b2UvR9iT/D/qo+TG/TE///ytehlp9pO4dOApf9bu6o8PZd4CK1VPX6mHn7v/gTZw2r/syT6/+q6Gk7lm+OxuADTu56puG9jma4sdoUxoVZUji6V0fyfSWruMok5Qq3dHpmYTR1BkZen20+OK4QYlY7Gfnq8o7
ZRY5C0xk9pMTP3CNNlD9/oZO2IOYj3k/aarcfd4d3cQ4GUlHkfalyDLjdmPfju7mcpcumXJx75acBCFfcv52GgU2woOFduUCrd+VNLeGUEZMzTWLOAc+w042GpMJ6gowpTdL6djR0s6BO2v8S3Zu+dwKZXZ+yvduI5+9tMyx6/pBQvTSIXu6iG7ieRr9fzgUAxvz+5sybow6iLWtPZhfhHOH2mGZpTxv+XIXPD04IuVelVueSEy2T3yp/BHzHCID34Ld7yi5Udnsn1/qYk4LHNXh+96ChO7n58fd9x8qxTVx2hNaLgt7gk/Nu97iqaEsRkvmrzgesuH9YIJB+AjA624euNE243j/HTguImO3/gp/wCb9x5igJ2qMl39vl9l/bfCBHkh0KLHyvyi99Xi5eR5zOst5494g7lXjlUPSVNp/E27+TQ0S0XJ1pSfQLDO8MOwZ7h8Sf5Ve+UJeCct5++/v3P9fViCkrSyFn9OCr+M2/SGBL28tk+7xruiCj4OlzuT3f7xN2/dcn6+fhGZdx1LvKLmC9CIF6Pp/BWVWz3VYkEM/YIg7rLuh5jz4zL4r3gpEnFr7uUT3cA73BeKJEuTnmt+2br+haGHu7u97ng4WH4yr9tT8aKhRryy3e1Vx1DbgE+7rD2lhOnKLuf6z9v4oUdryG5hqXPhKGUETBctuxP+CmtP1gk/TIuq/d85+xJcsgW9YJjZfd9+rHFvcxPd7/kvfosI7vu93d6S1FZVF5fcJPLbCl3d3vL7cgaehZT0ypW15lk99//wUxKGn3et3dvwQ4flb9zpshav04JyQ4g3Xc596kq00Wnn7fWCK99oSL5P4IpbxXqu1l7u+/eSvqtpdUX99flhDyFbt14Qu7s3Ezj2CPx+a/5faT2iFfenFvHXfOUu2eP6ohEUJf1dCfeq8n6m3/p9iUCXJfInDp4dsnq7kiLOyababCPqwT9IE/Ll5/dvJsEouf9y/Squ0hVPd97yJfzdXryUITrX1+5ju/1J3S5O98TNb6QIM8q/AT8hHeIfeV4k77hOmTGWy5orCJAkclH+BGr9/NvuwPZ6aKfXY9elzXP2/RbPufOosm7u+i/QvJQTu8uPTy/lJ4Q53qWLIyhRLfrCj9R4wp+973u9L4dLbm7X///dM18ys1cfdi4Ukc59ntf9EixZlU2MOr9FgiJLuZPd03vxHVOLQSIll27unlkKpQQvkj5f2eMyqGy7Td+WCb47lWt08Wp0SXzXr61Km6M8xUrhwe4iTKdfDPkkQhtqP8k5MhPrfe5SV8lzfgsgAAAEt0Gab0pAK+CvzDJckA/xfNa1rty2hpk0wCLxcpR3IDm14Y83RBvTfCJK1LFlze48R37YQnFw9nWm+vfgmoz0Clu5sFGFRyf0/Z4IOQmQuyvgHl+w4VcFWAxhcXNpsyV+nfBJrJVLpvcEFDu8Ez4/0utO/BY918lywyfu7rgo0ocEFIIvHsIZwa6dlLTWvwmTU4cP2rQXW5YQEPcdDDdDCLxda0CHdFcv19wpY8m0Wvu+wf3yBKm0QLpRrrGH2RFbxWjzG3y9gZgt5wb3/F5TxQi0OoQcS7F/UdD8SrRBv4z7coO5f13BFyHQzBik6S9xc38bnOFPBXmuYcyaO+8BB1cvyiy/KWXjDO0KwwMMj2+xeyerBB2qrn7P15NfHSewvZiWWzOU065iX1/GxkyIW5BXm0K3UyP+/t3b+6dq3gPV2fr+5+e8dIG3nCwNUPEQKa4DZWRkXtjMEju3K8KbWl7oHtyJwhQbRNwH54ZwbGECWUHvQJXSQluFCuUu/ppa4BBVW+Pl/VWZzgWbut1cTxfSqmitxu4J/m8eJm0QlyNDd93fCT/Q413/bxNT63LyX7gk3ulVfid3cZ5itUNfeiFhPe8q8MTny/W7h826opd8K4l3uPR0E5x94eVFDrttU/6KNpTHHMJPXBOEBX27u79fYj59vTVln/QKa1ov37eGXr0EGT93yvBTeGkm/
+LZB6Sl3KSutJbhCcXYX8oPtELkuO5PSS/oJUGu/q4ze1Qm7FRF7B8r9Wf2qOlWhKCe93vtVbKKudB9qEX8SCg6zMJF+9U876r0l0JNZXKG86vsiBBgl2kwb+pPbPgg+Nx24dz9f471LmierfPhK4LKcJyf1lqNWn180ExzRkOzrmCUvfJ9v76xdYJyXcxzMm77VcJzBnU+2mEi/5P91dbXFSTBFhOcf+C2HIN+kiVeVR4W6xsRuby4HcgXfk9pon3QlKPwTmIi3vdsn9dONWT3frcJlvfd6dpMEt3vx2EbjuKve73CSzosZe7tz8bsuCTZu6fL7vwz32z9Xt9k9r+C6Ihq+7ulT6Ld36Gy7K9t2rvvvd9/iZd3vf0UuTwjmMPJ00z5d3e9t390ssnr5nqJK6b33l+/yd32f79D+1X6XzRO9y9r/dBPd3vfaQT3n13uEfCdPetZfnndoEWapts6ecwyNr9N59LS8grlLvbvRi3d/h21u5cvMvqe9P9DZiXfT8nX0vZEE59Z777Vz/6Eyb3rIzol2uT3780JkD3kclN3d4R80r86VPbc0/v+YXP+T9JPFtot8yK63yeryxtDBQrh19wn9fXtLUTd2t76vsRNd/kiZov7vyMpHbeEy+kjahe6Qo3m3TfdXy+4ie5Sj9LLG/oxME31gO6yTnU21/d54QJe7t+S9WK5yv9cZw7cI33ffdO03URfd95flkyMXvd6/JCj9QmKhLe003PMVlu9vGlu/zxI9hbd7c3rD+0/7crcJi173fb4iiGF0Jk1ftLyTE3en6BJe8W3kf2zu7KFi+aReQRivL6pmr2T3lKX2Hppt1Km/IxGcWfr3+JKGb7vSk/9lA3eWnn0vQWGPIIC1tvf4jDfjT2vzXWXvV/JEFHNOK71BZAAAASDQZqAFfBX4sZx6nhWy+ask974S6STxlo/myNmHvhubFhkt/49osbQyX/ywkTjM3UoF69QVW44W+HcOoMQtsmbL9l7gonLbL5Ac+VMnpU/tqlUnq35fqy8FGGrS3cwyQfSqq6DheXltuW/vfC1y4P+fjfdJxZKvLS2975SijJK0FS/+5hQrCy4bmeWNj8nlIlDYJT7zYwt6NpzJrvglu5Zc6yV/u8Ei9a/cWrMCTmtUS+CNPaRdN9DjggfnCX79xt8TD7bQahS15GltZguz7n66ZZcdd9/A3d++UK/X83XkL2jmF50G5OIR1nPy8Tnionq0uki3GnssW8imkeEH/7U8174QWs76VB2eKsP0See0K9DN5L1of+qTxhMCbumXpb93pSfeu5FW/9P8PpRfX7952DfrGzPyETIONB9KH95F3B/7hL7FPyenStLj8Nd929A/PzRtXwVc9gl2LF7HbAanl74rRx00FLkFmbfPS3d3000U5P0q+ycP9HL/vl7j8gUywoMFG5/ONG835oDLa3CR5xgLRmd2a4msz3Pxv6XY6XbxYb00fN93b6zxWYToFI2JbIBq3HTb9f6sft0P45VLkDuYryRRd513j1W95p9bf0K3fd8v/WCghQ2VQ+ZNyhptPwn2Cy87zqTIJeeA3J8t+Muki1f9Yy0YiZ3sgxKfdbgtJaIvwzeX8tK5Kxa+gSSryhLhq8bF+HYsbZPXuCPcor6yek0ep+CIukUP9qvExn37x1/XyYkrv7un00FRBpBm5H6UPZ97NQm/1W1CPgnLe9ysZ1Nd177S0uxbIaW/dhvkHjavh3lfk9//CNzJz0feGboLtX+EMJ08tt4Dl72BbyhoWKX9csg12XJ7dn5uKGTv7v0eLKNoP3c+In6tqWoIvNFhXqFIM5+pTBC8r3fn8r1luR31sfy/6EV3EAl7hVo/fz8u5WPt9WjpVtyCWq5fFfoUEHKCMayrLfJ72N/klc9Pr3V+j0dh3RnyfS1TaaO5SrKglKyX/Oxf4u773hIv57bYUnxqXvu9uH0kH2RbDs77f8X3d39i10Vj4EN0Bn/d95ZXd/Z2CKHkkn3M
n0//1Qu7vfe197xtl8I3vd73d5f0v1rIPq+73fcJLIKzbult+3WVVjiy07u7d/Iu2u3RNdO937PLe9v9ahEv/2Ei8mxX/Jcole7O1F3itwl4/zbxkgWtTchXcZkuurH3pZy7Le7yfSv9j/l7vabxuEXa1iDnz32W/vOKU/v/jGS++0+0hN3e7/oVoppH/1pEYVIAn9f+t/Rp3uotfwl5ZO4e93qcvS6XITr8n9XOi/rX2PE3u93d+npxF9939k3fL9/hKf+N9wk/pkH0PW+I8z4fcm+pb3S2LZrzDz6wXle/NX021/J9pWL3N19l1Yvuk3BHloix23pYS3vLuFdRgh73ivd3cI3ch77iCw2l4uU3vCPg9NW+EzhB0v+73q6lR13JvT2l0T6QJKJ5opVJ9a5JJrt/WFskhi5vX4u76UuOl6opBdLS8mGtSU0puskQfk6ruCyAAAEu0GaoBXwV+LGZ+1hCyvuX/3LR3C4tdwxXCX6C3+x/H/6XuLl4XffPkoz8pV0EEdRAYL/7hERPbUz2v2hz8vu94yJ3+Z/1D573vwJd+dbR2WEdxl6Wy2kPEXmAoIPrbW/quxl73fcv73eT+n3LGd05Z8Er0HRxfL8n6r7gs7mk6+EY4N0cjKN+Wy5O7l/34WL/5Yo3NI/+9toPWuHdfRLaXu1g/2Ca/PxmtuwCJ3dz9XrdX0pLsW7EH8E/XWN+7isZxyBfXj7t7WZT23dpVoR9v7YHXyPXsXpWLvgBzdWc/gz/6D8JwvCpwuULI5JNO7Un37q405G2fBu8lSrcuZsE6bTZRYATOGroLK2Mqhkl2mUS4gwvimVEyPNcP2+n/Gkicol1DyEzYj+knG7/cqzlotWx/i3mtFJ9elWIrf+CjZhMwRaDWIbDW8NVlA5l//CkiwKsCoew6fo3/vCbhgx0q7ib5h+zLk8t13j7Zhd+5syc6S/c0Z57cv9FuNq4/hTbCgx3dTR446pQINxGCRsS/GkMKs7GT00izt8b3BEbRXKHUNY+Qhr5HmIezYn9+xNW4XababNZa3qvBX9w5nKP1x4ToKSeXRYSLebtTIi5J6r+4Q+P3I/LBPlAuiwjwi55l7x7d6ova05a3/N42Jcvv2oKyO7kXhpLsOnW6J0nt+UXh6JzCT/37RBRkYQq7cv2uRAmJ9zAuUlOPJ/yc4aIvbQltguh3bHy1JIOotV4diX04I75KOdniyj9HhLu/dE4vy0wxLPtUfqugVEdzHbfCXh7QUfrWb66fB2UgkqddR/ane2EfBFpywd9iLe97enJhiGe/Z+T+t86JBdZYRk87301yjjOyNuz7QVv94Jvj9MdNVjd2UkcEi2vr810UvX1nnLX+IgmvHZxX0C0zJk+/EjTuPR2h84Ut32Nihm2+leT0rE3JKgUFIL73G3bY6oE/MUfL502lagnmn85crHlk/dFU5FBCRyLh73tCPgkKtvd699OgtyD2j4vrEBDQNXyrwm5/qgXyng2pfDkGT0aWPfGUgqf+sE8eDy4Nfdk+6z0V+tEY/HHfe99zSk/uvFpWNWSRgo87GP9adW9tE3d68Q2fGcsIvyoEYqVTDTR+Re7E80V4ITO+/tCy7vd/WvBHSnuW9MmzFe1cSkZ4S4hf+mbe71veqPqnRcusE93u99QmtzxhW3Lnc5W3iuEWBkNUx35d315rFnNk1/R/5hU6SU3anqqcfWurNe+03aJvenxRQSkMXCbq8l9l72dITf4JIl8V0q76pysom8EXlvHo8hAl5X/+vrEeXjMR7Ektf2zvfXmhKf8rF9ZOqdfBHCT+Yhu009FqJGaEvZBpfWX8wjshHu9LYk5Su0l2WbLdaTf6+vSmhLRfRN5PKile+tyMfNH8vI/9iQhCj8iCYqfN3d6fyFDWa8fezjU5xas3JvfXpJf6y+9p0IjePfIceu6d8qALeTH/eXyfwSkqH1+mf6V3bZJpriHHpumjlU3plv7UkR6JyeqW9CJe70pYiHSuf/D
AoFaudJNTxv/+SzmHO4YzQ0KCZ/ch24nbQUmJ7a/+I6uo9nZllpfETTxVDf3+IK4qje4Uaa+sFcAAAE60GawBXwWeLFWhzk09TXf5SY7JX8Xz082/NuUz/KdY/lhjxYgaymIjOQuefcPx/TQueq+81gTNP3VK+cKKZmZGc5/cf65+/KRShGDn3He4fSkb790VW54LZaXvuil1ZeF+btmB2YY0AX8kV0eML3d3d7vPDfVamI27OFvFeXx/zvOVuWESOnXiD67Wg9hzQv4TeegL0AZc5Svq7jK11+tjvE7aqjgX4g+9xsUXNU/+/TaK9WW9gscgW+3GHI5ncnK8quoO6d4hx8HpA7K9h6a91liSeayrBb+9sVesFvDt+HPQwuuUHSjCjAPT03/YgdOPyuxXohkO8n1SurjtyhOtJuH4NKpCTwP0lqE6p9MbE4N4uvcKXiGF6Ce3IXt7umH5eCI2P7uMJ+YRwm+7a3LGkEuOKzBe7DCDiqo4Zq2bAoFnQY3+IIZwRRe9SP/4Ulf8Cb0Zbb1JH5mt2YEvq84vUocQZQ9Tmi5/p/xpbNm6Ew2Xdt1f8d4Q+vWuHoT1t//eIOYX674O9l8i3hFvVRe9Wizv02yeT97xLaCU7ii46ejLlR9NexcIXhe/lIluY9l7Vq5IS5Qype96vFxZXn88N6pzwheyV8hUblIlGvpawX7u+EfBVvKv6zXk9UtU3BQTd5x4u6XJ+r6WFCbwh4TD4QVVmy35BhTVNDF/7jkR7f2cDr5RcHD1iwkvlBaOfPwe4OfH4NeVBKqC0U5o5u/4zZpUI2yvo6fhyf6sQcdefJ7SYtpeInzeen0W56xtik/tVxsFE00sumBQ7GbDbmnhAvp1aBnNuCOwZYZKOdawe4iXULS+71ZV7RCeN7l9/VljfcJ/qsT2+5MtOnvBSY2eLcORdf+8V6cTlKkJ0Rwu42QGtDXUw296U9yCt3k93G6pv7y+/VgkPmFTheLL9fguNmLnDV5e8JeUry/peveFboalS8FI695x6QfdONkbUnG+if2u41di0KO993yfq+Wo/L7+SNyD/6Cc6B93e4Q9Uu/UEIh06dZPqvLTBWJe3e5YO5vc+m0Zk9NKz+mVyhf1mulRyetVqii/LS75fV7n6PEEMysdXyb68vviJ7Pu+xcUR3d70tuMmCmykrCdJ7vdPr9sYW98s+73nhvrBMYqvd32HbhEn9Gn+CLu7KL8s/O9/Viu75/1TK/T1Yi93n/r6vz+iTbKX6oIyrmPXe/u7uX7PPZ/U/t1wWXIe3u7u9Ltz0kSUJeCLefGPsIFfuf7u7lWYPyek0ll7LMfKi9bmuz3kpDL3CPzTffcJ+k+6sf62S95PrN/e992Kv3u72mbk5/totRhUt59vn+Zr9Kpo8w61RIugbj7Tu3cVl+7hHzZXwUXPqf0Cetlyy5Yrct5GUTljpc8I2N/L973+Yj2WT6V/+/b9KnW2um0U9pc0ly32kmSp4vV/jsviHmbsv5YW8I+TU4/2Tm6L5CQh/p+WCMr3crZPWzl8SxXd3chvtNxtlMmlT2peYnP9khA5o7uUSf9NPvbJ7XdmiSAkI75U69qrQu9939YU0x4iX42uWywcSffrH7rUsF5w8uex91a5LbrDrqMX15k+YKvJ7pie/k+m6v+7pakqLm1660uFvJIRZXk/UssnLvOd5ITK9y9yLadF5J2Je8v15oIt3pX1ZQ6lCWO4Z8EJJ2NKeSpLX75r81lWHzoQbeCyAAAAE3UGa4BXwV+LGaT8NmiX/3L035mfNlvrVwl1a3OS+E+0a62gx5vDLc5f/LCJNTh8oUps12T+Okc16helke9wjwIv14/75fy7cFHPgz4bILAlnaYW3EvE3mI7f/F4aUzCT7nxsq/BR4yyOKuN+I5jODVueUt6dfRiPd/lE4X1YV8wqM4sEHljd7J0SEMjVrCvrXqwtW9IZYpYAr0/vs3OBg9pEmvWGO6UJF3J7ZKTRV2dgg
mHcMRujrmCjeW7Mr1M/749IrBskZoXlCut6kXa2LtJbOxKSWuVbfP9HhQs90o+GzxPb704eX2DZ/k533H/q2klMxUVuTdfRSqroF5OehuAnGmzP6p6FnP46n9V4R3P89mBnz95PdIT/EFiMXR9NX/vLSn3BBSNzatDq8ZldCyI/sPcrfyek756gwqXuNQstxffq9FL/u4JOTU0YqX/cku7uFC/K2+FBz3LmwQIPFQ+eg20AL+/vqI6CSFem/RYUqwM6F3DzyekSlFuK54hjTt39O/c4EuvhA/kQ0uANPAIYvW9+v7uqVN9TGNz65g9/9lfa4UIndZEuTjy9/tLbCUP3L+GYsmNHfw/J/CXggfPzzdXpX73NhI0mgUv6Xh8jnSdAs/HppgcTJJonl97nDQTenYRivZStTy+3ye1Z2tFheMkTYe/C8Nz4exL/J6S/kQLJs+YvMP5jb7xuctVuMuh3IWiAV71XeIp5QaQfeM3+4yJa56X2TqRs+tswP6TDLrNpEUsCaTPexrCkxE5Qr9op95utmfPpSju6D9y8W2UfI/zCKFy5N+uGrjv961k/b/oJ9m+0+v8hzVu9pFSjBCKXIW+PH1793k/WvwgV6bJvFd3pwj4LCPutc/e2fkn176922LeT2np7xBXs4zW2R96fFIFIpcyqfXi3nXnFOXREKk7Qd0n3YrhDc7nYHcgRHxAeIJH/zxWvCBRr2nlwZYV37vpvenQJSuf+78tr4kxl0BEQ2dDkThOinrVBFMtf4Ix25Cvxo8EUid5F+SINutW61c31goOm8z8m5fueECf2tc9P3HTqPd3d8JPysI3iu4bk/dhy7x8bSL/DBX1d3XGvx/8Re7Q7B9/VL3BCSCUfXn2yoh2Ju79PkiRO73fJ6++T7giFXv2X3+gTHR3cuXMaLfIbkfrXBKQNopT+4rLzpCXm3v3BDa2zhfiy3SufOt8dNvtPyzSXqjvRC9SebrT6XeTLR/ZVCXgiw4ukVOiO7y6FT0mDTgj+Ne/2J61T9e6zwlJPuf9/iy3enrsagld3d970v1CBgCOq9Mpe13x0sTyffd8TDMIeEwtL+tJcoSyv3n/tAjEy6Sei/fqYhzZEfo8JZ1+ens8x3ob0qjnujcojd5PS/yRBTR9p8vonSj9t8kecLv/Ne+T7rLldSXpkjcnwl7IEecl6dZjJAiwomOmPJE+O/7it79wT3OR8BH+zbZq5Kru703dFu/e7guLx7pF9KtielmFdqQupRa6y3vpWlBbfEPvcW0KvVsFQyWXu73fvoInh+kb1on/LmUtfab4yX/0u/vrT9denC5fLu6CBbo7krXLndit3vd95e7yftBDkyGLdp7yaCEdX7npHtxXeX8kn/Yk/HJWwxkQVFTT8S5Vx8Z7/BLmRNZvkouerUQU5y7qVUywWQAAABLxBmw9KQCvgr8wzaVbhufKPXcn/4vmy5Cz/xfcIvMkzfQMMebwzAa+LJD/kchneTY/7uMj/RNLtIkj3u4nwTdqQEfv2rrPCm6PtAb4+d+yCZc0i9/lTrU9povDkIuDFP8UvhE/SVJdCYrRG+cVy3aqdhMt7Tp4XL/5ZjCXkDrKe2Mv989cQ5bvOCmTFRLSRXQQO8UsS9VdivaGXL3lY/PdDSW+5AGJrq8/sNlE/oL7Ml68uFFF1ahULeB9fX4wphY6F0jaGHlokuywM+0R3/7V9RZ7Ll6jgsnrtk6h8m43HG5C3+DE/DRgaJHDcXa7h+0f1eWCucXbuH5n/lQXdOavxBRyKJh+vv4LR/69QpnxvOUPzdBEDQ4l59WMn2eQvt+4KowcWnvDqJsd7yIXyi9yXLj8v/QiW5kCE1wUyxg4UbvyhIbyGbqbUO7pryGZu3jBdwgY/G6Ksb+8Hnpf+CP3QXsRaVC47x/5F6P17avhjRs0v2VjfeZULNWUv1f7Mr+Sr9x3F/8NlMPLFXH+/3ql1RJ+1E3KUsL9lhKXvlpU+8E0pfacacui3L7rrl
+trBYIywsnGTXYPuRRtCfKNu98/S3P3IS6+6s/wVYfv2MtgMTM0eQVhiWibT+CzKLQQ/PniaSh0vzipv2n8JQ1nvClYbbNS0scErv3C8Ivpe5XGbPyQxLr0fl/m0vk+//CPdvI99q0IbB5YC6e8ITppv288HSw8YdH7+9r+kjpsfs73d8ktq3MUW7v0VEJTvVXhQRYzT93d9Sobxx6fK+EfBQU+zv6bacfhOK95f/1uCHR760kLdDhA61qzI85cbMa9aWhUaXh6/LQ4Qnb/SNC3qf33fEpV/W3DSOdeanxsIF1vtrS19tudfsmil4ImwZX4q+q+6afU9mkgFqG/QK6BL+WLv8qW+bN3ThaxJ8891wn+qCRonvwi/t1hObXajNvd3js56yFcJFdzyYmzkcPkhoK04Lrn+SsNooWqL2w+QIfmwo7KkEJ8X+dab5aBJJbacq6hx2/+EX9Apy+DOpbv9W/Z+Cjdz99as/q32k24orgi2/NvqgI1tPZ4dImZh61lmOijFw+0PI0iC3MJ0OWdW80+vcK0kWi/m/r1oV72Cirv+vwQ4CkTeP1q5+Q7lIuplS/0uFrg+5Nu55OMr9GM5/7bF0r0cm/Q+9e97l7t9wkvLChHW6vF3P3d75Ar/r7IV7+5bvdX+rkGUyDE/Q2iO8m9QTl3e70/cmX+p8xHdqEskJFnx7V/4QpHH1y2Ufu9LL99b7vV2f3lvevROqz17V1kKiDaIl5P33pTb33ZN3+gpuTMve9ye7u5aE152MvPnNt4reYNTsNb9bqlXXL/9d1k96/G+/bapGJl8I+r0/wSXvbeab/2eUt775rahXLKWk+3fNedQCJTvyk9+zVXb6lu+lq/eQ7veT1sXfpEMm7W/xHP8vwqvwVCnd73u7zJ+C04Ahk7+1+/3n/PCnWhrfhyDX9by/giO9ztFdu0321KJRCO/L5NLe9PBLvd7udO+F8iF3it8uZPe5Cf0mL/pUvJIdLfkwxkiN5XKoQRF+tfL5pfgqpxuH67HEo+xG1p2+S8kR0ETet5fUn4LIAAAAUDQZsgFfBX5RnDH3xfUclbL3uv5q117i7nzapfiyhqZo3DbB/jrSGC/+WKESH4fL4m+vcZCddH7gxyX+iM93aE2C8v724KqLk01eYGkNp4vCdW+zJ+M57eyflDr/K/8IxmMhZDr7iFPXiXdXR4T8vD5e92xDGf1eiCiuy+4fK/vLwgSMxF9vpveoW8EEP/frNl2xk8rxjX63LMYjzwhJJO7jNu3huVpsKhwX2LOOr/mxUIVjvruxnJJt4Zdxh3jp49WB38B62SJgc0Tsp7Hl937Cl0GiOHn+0HYBfvp+4+IRY4f/2CTNb06NUPdFhCcXf9a/Ud03rtXHDjXk+3JcvNcI+V5/gqvKD+kYNGuPXV0odIKXd6/kCr0n2QEt6cLUvCdqU20SdkEfUKeY3LlbbY0j2FXHw9LFRzjsaA3fA3cofYCoSb3pKYJeNQBeCn9/je4PhmEVI3QWIennpspnLyeIvfx0QdcXF9jV8vIkLx7BHj6TzP+FOIv+Evx+ZlDUxnAErzkp5/efzA7Ntio5c22T6Tsuzw6eHHRLzh9zCliU9z99u/WRRp0nH+n6W0gpVJXotpWkP2jswYY3VBBWlmG7DP81pTdE42+8lnXg3AVZJx21klGINTZ/e/+K815YVZSD97vd3LpC1UnrTVN4UNuOo3dShXaVTT/iVIMJHGGpISf4LCPfL932a9wRb075bU/BP6beCN8vnsK9xFwK96ibye9624LuDNUYFsF8hcMZ9Rw4Fr8VujMhoQA9S7HKv7i8Opys0Tv+OLPj8qOdBDyLq9fgunfYe4JTQ+k2gTEET9L/y/W2oIjZ4bm+7KUqh3hOsUVzJvu/rH7u/kOXnzu1FsRee771dUHRWPnnTxfXvzndlHCUxZgj/8YfBN+TXEEvHifDcuf5SQH+CLQXwTG5FzA6gTwtHA49Rfv6DRa0d3vgyWv/xd2
e9yEa+gR+NlV2X3XwkWH1k8v5fa/ChtibOg3cbq2BTd/QiX7/NbuqJ6/XlFrVvVeX6o2vykM25w98Esy748ZB67VRUr+4QPd4zEe7uKi1+C6Yfvd+L6Nun+C3d3202NZFUIrysKU3T4hyK3Ly3cODvCUlvLepfb6KwR73rVd0vcM32uP5fs9j4KNz3W+8XZJRbzx6b66sSsn0rv0xl3hLJIW2Rp3neLvcibv7rkd99d0Sid19d3iizRf1XuTe4Rf2MKVv3Xl5//5JNdy+T0rK+joJ8Ktjc2XpLaE73u+mxbvv6+rBPnpvd0q1lvM/rF33kt+7sXCF93eReaXb6gmu6fz4kGT09PyHIc2/brQwU7GrnF+7u7z/iYZhDwSBYNu4jzxTKvy2rPk9VzLU0/ivaZOhKUbaIQRFZJtbBOGECrYvJ7XebYU8PpRoQuqCI1+G29aqe1uUsntK/eE9TsssfThEjJne93d+xNU46+vr+8vpquJ7lwvV1PhLwSxudTe4rppnF+QkFb72nqW6Qrem9smN5fpaFSlzrPrNZY86bTr6cpSKX6eb2ub0SbzXL7dZoruIcLDPWFSfVWTRwiCIYK4ret2MWLLn42+M0t97vAO3wtfr81nz52/iu1SJBZFX3d3GMvnnVVTICvY3zNNuNxfkTzYWyRAimGTvg41PK/LZSGt/lvver9evoxXe/y3HyI/yWJDy3HuGPIIl8s+SCKG/fPaXxPNYbs9l/IxBR78bj7MGe78lxZ8FcAAAUXQZtPSkAr4K/MM4qq9ywge6V3+W1NY3bXlmvDKw/zXnJHcGS/+4bJDOdHP7n6kvl/u3D2azHn2Jcvnjlqv8Is172ixl9+M89q1E4GW71UJ38ocz3+Mlg94e6Dd4BJve5vzv9fgs3fkkGV7TMvHyhlnApGX31bMXd69EKQ1dkWCvm44SqUv/tivNIb973tlQ0h8+BKdf3UgU+U3TkCb13rbb8tAB7wI2pp/cbgnWOzx+/mUg/kTaQm00SqpndFlBo2L+r12Z5dYAl//v4ZJ+vAdrLxN/uX/u/6NwpHUpA42cnpJldN406RYl0VYUcNgGT5tBlfGaW1II1L/5xp1QX4XNnnV1LWgpRhI8//J/e+WH6WIlWwGeAmN5I09WUBGxW9j6JoWyElCmHflCIScIrc49njKbw/9MKZ2UuctLV9eS7WlavD4sypTyQhsIPUA++UTV/Bex8m/o1UPQbrCqnaGb3uOhTeHtobyzDAk/ce3k/W3vDU8HfEjFf+X62sTu94WYi6/LNrhyfct4KPdoIDhW7u4L24x/Kpdw58KccFawcfxviniCdGULS/+zLkXiGj7KmYNJuWN4Sx+6hkDQHHoT/0OErplewL9qbH7n9eJLHRGTylxv/RPby/yHlY7yNIJkTM53d+u8uZVPW/fL+3eHzN+HHunKgMEWToMG7/W0MkfhPwWZYhNesxXSaUMmqhR9Jha93uCXeFJJJivj92Cf1E6dbGwCn9m+948FHbgprwa3hr/gT6/v7LwN/Kcdx5I6LS9TO3lOekq8wG0Ngk5pWZf/wUX3yZ78JHw52vDKLr9fguyRPfc4acGI7abaK6X4JjTr3mDV392UZo/0Owj4J/L7378EMQ5r13/JhuUz/iT8fc4UZC/b2o7sfdHDlzd4tY6K36sQOcSGvW6eEHL9ZYf3BAJbIV46JfzBIJeEnvlgu2da3sTBSaWIaSXmvJCREcf42BeEfD5zu/V+GizB5vcfNW9N5IQrlaESYHpuS56F14Dp0iQ4XH3R52mLv8v16Ru51F64JiYHaEpvAc3fryt+cUWwCXgj8/hv65Oz+8KSqpytVU9XkeEjSQsMw7SMl3Hv3glnQWKZeTZ5PB69u8EMVcG82hPl0VK9Jyrp+48p6kvvd9wk/sEhHv/rl6y9PLs+fvJ9ub+C4k15Px93feSq/0t5Rs8n0X33XoWiCsvfSXRIiMH/eO42E/Ju9WWuXf3lLd/0Z/fL5b9
L3RfTXiC71kXu9jQffhF+bl/fbCJp4b3wPeJVBLPrdIFxZ/vtJnJ6SR2Xtm3S1RsgVnyM+3nASeskn/dDYKL7vdkO2T0kku8cS9d33fWMK93d3kjZQ89bl/cXc6Tufv+h5XSLluye9vaiU2HhU/Dvu97u6+OrPCb9sV3d4r9Nix/EJ5v7IP3d7ufzwoFf4JZ+XPQY4ScU3qWWEO2+6Xy3lE9UEJPS9YSzyL4su6XGEHbz93uE3347Yh9kYy73L+93vd/gvgg+ek94rBNvYcpO16sfP5MhHyEh5U/3rIvrbC3EvcVxLyxFYvLP+zsEnDrU3ljp0LiYav5/0Ik7c9+n8Z1RUvW+73e8LP2cQI5YivSeT6uSyLHFvd+fPS3NdN++At1l0erKd3Z1l9ubzy8oLt7tvpB5JM+YXL6yLihGK30nu26BAWX5S8tivcp8qeLfkyXk1rk9OjkJNUh1SwzkiJf7Yl/Xv/khqQmCTfS+SjcWf/JdhswvcFsAAABNVBm2AV8FfhwZn0JmnZf49/4vPqOak1X3LIXyF0X/fF3H2W5OUphjzZ3xnX4RJbPuWIfjlkOiBC/cdx8E7+NpFBd3wQ/XkPX+TuN+pW3E+IkAM+TEXk9ps93cFMkuRHvbl5bWpYR1nLl7eHoIHhfw54Cjt447Qgu73vfePI4lhKXy0k+/colz8bOnnwr4SEWSrzzvdwQEphngwOXPLc/DkuWod0LmUrkkItX24jOu8EFhxInFNIxHAtCQWFe2QjV9Xd6G+ym8dMw4oGZU3ZHukHKS+4wqdbsBR1X7j2d/kTC714T/+jZ6dW4TvussP2OpTh8CJWuekD+QH/eR12AvVuGtUhy7eq8NTFLYq5ZNr8nvn/ghwCDXad7+UGX+7wvedMNLUX8ZhGKEn7X9fQnoae3d3+K3lle/ylDV8tZnwZPwn4TFTbZQ766Mv7ZdhE2YLkBlkB+IjKPrkM+bDjwpdZWFPGj54vvMa04Zi0WJZ5d3OCPAbdvn6xvCXD27hZcMdS0WDi83WhXevcX6JhRx/8N0nr0j/qhh2n64M2do65DJEaPpEHCuxGzf42YHfXdbgwuW06Lry99fu9vT7VWQJlpgkVbbzDN/bIS75fvf9/cKE3h9Dynd+kzgNJP+7dPOTnGRgaRPYvyn5cOEn6KCcU9vn77Mn3eJdO8bQv2ghBNue49XmE+eyktNiO72h5Q5O8fuiWwYfJECR/jaulqQktB1Yko3X9nZsO30zH3Xui7KC797yR/m5amBz6BVht2LSrA0ZrD6zau+5BRsv1vYnyJXd7cI+tfq233125TyR/Tjq+z7hYZeUKyqpwuoe6U/5Pv/x53d9Fj7vPr7wgZqSFWW5Ahdb4aENZz4S03vk933etOLVsua/sFpdw1L4TGUOzfXeCLmq/fQKiR4XZ/ALP7y/3AR8E9z987Onr9cTrvv8E0g5fLR154a17qx+UpA9ff/ZyArve773/ye7rTdgovfL/vxI0nnXvyRmEXyngsGPP7l8fuPUd7aaElbKW4fzX3QJLviwelXj81//HsjalF67Z87/EY6JDTsInKkbW/0EiO793rqwoV33pbu+7p9N9Cy0jXu4eXCurdNCOhJ+IRivva+CapaHLXvucQCbSUnpvqvbCe9E97sahfdO76aU0EN7y231Fbve722VupQbVNwnfd7uEX9/Qre+eGT0q3t3e+lSJCYmk1ve0k8Ec/7pwvMqCkf7cd4S8fuV+Hf+TUu2hqE0ROtEmvsqryaV6raDm93+SK6Tqrl7/FR4nGcvMhfcND9S5GIGFDxg1fbu4R82CX26qNLlT/Nef76ZCi05z7Ppm7b/Lbe6J9at04SuVI+xon82uh9k/GfO7Zce71sO2/vWe3PV89qpP9eoRl971z5y/L+J7RQrbHvhL2ZvVdlvd6toXBJz8o+ayfrbtb8uKj8n6XqT16r9Ui6Ke/CPiH7u7+K8KrMbBKKe77uxp88MFCHBhrfByj7Vus
lD0/+EM6sw+tel3d97Sf7L79p5/X6wt5zsC2XeX2+aFyOiKf6vyZ5fX/Zbu701dBI5e1HPo3n9N6ljZo34fJfCF1zdJLuKp53ZpE2+sdy9x2v7ss+fYsOwWH3DGaQYQYkj/ERuTqPFR7P14go1o5Q7Zqr5Ln/BXAAAMBW1vb3YAAABsbXZoZAAAAADdnh1c3Z4dXAAAAlgAAAu4AAEAAAEAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMAAAV1dHJhawAAAFx0a2hkAAAAAd2eHVzdnh1cAAAAAQAAAAAAAAu4AAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAHgAAABDgAAAAAAJGVkdHMAAAAcZWxzdAAAAAAAAAABAAALuAAAAAAAAQAAAAAE7W1kaWEAAAAgbWRoZAAAAADdnh1c3Z4dXAAAAB4AAACWVcQAAAAAADFoZGxyAAAAAAAAAAB2aWRlAAAAAAAAAAAAAAAAQ29yZSBNZWRpYSBWaWRlbwAAAASUbWluZgAAABR2bWhkAAAAAQAAAAAAAAAAAAAAJGRpbmYAAAAcZHJlZgAAAAAAAAABAAAADHVybCAAAAABAAAEVHN0YmwAAACmc3RzZAAAAAAAAAABAAAAlmF2YzEAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAB4AEOAEgAAABIAAAAAAAAAAEKQVZDIENvZGluZwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//8AAAAwYXZjQwFCwB7/4QAZZ0LAHtkB4I/rARAAAAMAEAAAAwPA8WLkgAEABGjLjLIAAAAQcGFzcAAAAAEAAAABAAAAGHN0dHMAAAAAAAAAAQAAAJYAAAABAAAAGHN0c3MAAAAAAAAAAgAAAAEAAABbAAAAonNkdHAAAAAAJhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWJhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWAAAANHN0c2MAAAAAAAAAAwAAAAEAAAAPAAAAAQAAAAgAAAAeAAAAAQAAAAkAAAAPAAAAAQAAAmxzdHN6AAAAAAAAAAAAAACWAAA1rAAAARQAAADbAAABfgAAAb4AAAH2AAACXgAAAoQAAAICAAACjQAAAsYAAAJeAAACvAAAArkAAALeAAAClAAAArEAAALjAAAC9AAAAloAAALZAAACiQAAAr0AAAK6AAADTAAAApsAAAL+AAADEQAAAtMAAANpAAACjgAAAuQAAAJbAAAC+wAAAzEAAAMjAAAFBAAABJUAAAVVAAAFCQAABTQAAATYAAAFEgAABYsAAAS9AAAFVAAABPUAAAThAAAFRwAABbIAAARiAAAEJgAAA/wAAAO/AAADaAAAA44AAARGAAAGSAAABekAAAUtAAAFbQAABHwAAASTAAAEmwAABO4AAASAAAAE3AAABMgAAASfAAAEhwAABKYAAASfAAAEZwAABFgAAARlAAAEjwAABHEAAAVpAAAFZwAABYkAAAWGAAAFzQAABQMAAAUyAAAFWAAABTAAAAUHAAAE3wAABQ4AAAURAAA3RgAAAesAAALYAAAC9wAABAMAAALwAAADmwAAA8IAAAP9AAAELQAABA4AAAPfAAADtgAAA9cAAAQZAAAEUgAABMgAAASdAAAEvwAABF8AAASUAAAE6wAABSYAAAUGAAAE5AAABFgAAASxAAAEgwAABLUAAASuAAAFPwAABIwAAAU3AAAF9AAABXMAAAT0AAAFXAAABJ4AAAUBAAAEr
wAABSAAAATeAAAFoQAABScAAATOAAAE7QAABN0AAAThAAAFnAAABRsAAAT3AAAEuwAABIcAAAS/AAAE7wAABOEAAATAAAAFBwAABRsAAATZAAAANHN0Y28AAAAAAAAACQAAbdcAAOgMAAEi4gABh9sAAd8wAAJLaAACqSwAAxNjAAOmUQAABhx0cmFrAAAAXHRraGQAAAAB3Z4dXN2eHVwAAAACAAAAAAAAC64AAAAAAAAAAAAAAAABAAAAAAEAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAkZWR0cwAAABxlbHN0AAAAAAAAAAEAAAuuAAAAAAABAAAAAAWUbWRpYQAAACBtZGhkAAAAAN2eHVzdnh1cAAC7gAADsABVxAAAAAAAMWhkbHIAAAAAAAAAAHNvdW4AAAAAAAAAAAAAAABDb3JlIE1lZGlhIEF1ZGlvAAAABTttaW5mAAAAEHNtaGQAAAAAAAAAAAAAACRkaW5mAAAAHGRyZWYAAAAAAAAAAQAAAAx1cmwgAAAAAQAABP9zdGJsAAAAZ3N0c2QAAAAAAAAAAQAAAFdtcDRhAAAAAAAAAAEAAAAAAAAAAAACABAAAAAAu4AAAAAAADNlc2RzAAAAAAOAgIAiAAAABICAgBRAFQABKwABwAAAAAAABYCAgAIRkAaAgIABAgAAABhzdHRzAAAAAAAAAAEAAADsAAAEAAAAAHxzdHNjAAAAAAAAAAkAAAABAAAALgAAAAEAAAADAAAAAgAAAAEAAAAEAAAAIQAAAAEAAAAFAAAADgAAAAEAAAAGAAAAIQAAAAEAAAAHAAAADgAAAAEAAAAIAAAAIQAAAAEAAAAJAAAADgAAAAEAAAAKAAAAAQAAAAEAAAPEc3RzegAAAAAAAAAAAAAA7AAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABK
wAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAABKwAAASsAAAEqAAAAOHN0Y28AAAAAAAAACgAAACwAADXXAABrgQAAwYwAARKNAAFhWwABztsAAiToAAKY1gADEjk=", +} +BASE64_FILE = { + "name": "test/test_files/sample_file.pdf", + "data": "data:@file/pdf;base64,JVBERi0xLjQKJdPr6eEKMSAwIG9iago8PC9UaXRsZSAoVW50aXRsZWQgZG9jdW1lbnQpCi9Qcm9kdWNlciAoU2tpYS9QREYgbTk3IEdvb2dsZSBEb2NzIFJlbmRlcmVyKT4+CmVuZG9iagozIDAgb2JqCjw8L2NhIDEKL0JNIC9Ob3JtYWw+PgplbmRvYmoKNSAwIG9iago8PC9GaWx0ZXIgL0ZsYXRlRGVjb2RlCi9MZW5ndGggMjM2Pj4gc3RyZWFtCnicjZDfakMhDMbvfYpcD2bzTxNhFFZYe90h7AG2tTDoYO37w9S1O1A4cIyo5Bc/80mALR6pLVYY3k/hJ/RMJh6J82d4e4Dvlo2WRu1tb6UEPV538Hc4H8NqJ3C8DAWnDIQpd4lD2LdYomzcZ9O+Km1qWG0VSCRKG+xQD4FuTZeWdTcR0CiZiqtAPYXOGKOhEBnUD3hC5M0a6lcoObInwdIErsAHcI+F3cknsB3ANFJCU54Byf6B8AAvdZi9s8WokcXNFrvLEj0n0gXu5Hm8TJyiK6nm+54Ipd3IXnQiae5H5vyxTf724RdvlHTtCmVuZHN0cmVhbQplbmRvYmoKMiAwIG9iago8PC9UeXBlIC9QYWdlCi9SZXNvdXJjZXMgPDwvUHJvY1NldCBbL1BERiAvVGV4dCAvSW1hZ2VCIC9JbWFnZUMgL0ltYWdlSV0KL0V4dEdTdGF0ZSA8PC9HMyAzIDAgUj4+Ci9Gb250IDw8L0Y0IDQgMCBSPj4+PgovTWVkaWFCb3ggWzAgMCA2MTIgNzkyXQovQ29udGVudHMgNSAwIFIKL1N0cnVjdFBhcmVudHMgMAovUGFyZW50IDYgMCBSPj4KZW5kb2JqCjYgMCBvYmoKPDwvVHlwZSAvUGFnZXMKL0NvdW50IDEKL0tpZHMgWzIgMCBSXT4+CmVuZG9iago3IDAgb2JqCjw8L1R5cGUgL0NhdGFsb2cKL1BhZ2VzIDYgMCBSPj4KZW5kb2JqCjggMCBvYmoKPDwvTGVuZ3RoMSAxNjgwOAovRmlsdGVyIC9GbGF0ZURlY29kZQovTGVuZ3RoIDgzOTM+PiBzdHJlYW0KeJztegl4VEX276m6t/dOeiFJd9a+nU4aSQOBsAaQdDZAI3uABIkkQCQoyBJQcCPOiGBwHweVccRd1EE7i0yCjjDAuCAIo4y7grg7Iui4ovT9/6q6wzLqvHzvfe97z++bezm/OnXqnFpOnXtuXdLEiKgHQKV+o8vKR7HBrDcR90I6bPSE8ZNXVmy4k0hZg3rz6MlTSqxPm64jYhHU+42fnF+wfOjmfdAX9dqpZWOrJtxywddoSiJy3Tp7Qd0idjf7Eu2VaJ8x++Kl2j0Zr/6TyH4OkbHy/EVzF+xeUb2eyH036hfNrWtcRF6yoP8R0HfOnb/i/LWfPPI+UaqTyFbSMGfB8ttq5/aAbhnI3FBfN+dg0jPojx2E/uAGCNwDLCrqmCPlNCxYurzv++ptWBzmQ5/NXzi7LrV3+h6sB/3R8gV1yxcZ1iU0QR/zIe2iugX1ntr+bxMZUGVlixY2LtXzaB34+aJ90ZL6RbmvjN2KrrEe29OQKWQmTi5iug5e+HI4fUkj6I9kgtxJ+TQVo/8J
ugbUFZKX3lP0+TMX7E0jo+Oo1EnHHj92qVNKTruGS4mV+uI21C2pm0Xa7BVL5pM2d0n9haQ11M9aQtr8uqUXkXayTzKkrn94ZvmKmY4RX5vTzVJ873s980T5woThm489fnyuk8x2VC0nRhSlPc5zrCYm60lnAEO4GdaWDyzAzWgQbkbDcLO4BcnVJsW9koT4GoMyUfrLSOWonUPjaRJNg+eIyk6t6++dvH/iAUVZw26CN82G9YYBmFJ6rFT+Tudzt9nAbUaVi0ulf/Pe2PHjxlMYI00zvBydyAaYRrLWsNg4jK8GDU+KHSb1Z/fl/+R6muXLe3fs5hnyfkvcav+u23BPfF9LaAYpckd7x3ZU7mVSbF6YKYP3TvLsFB4uuLB+CXRPxbgPhB6H55mkRGnFKYNSZH/5sb3T35TYgCfrJ07//+cyPEt3GabSvafU7z+1XW08+WwZC2n2KXr3/HtfpuspVRQ0XUSpirxDF1BTnGfYjYvjPIfPGuK8ghg6I86rp+gYKA1aMd4IjqiYltA8qqP5NJYqkQfqUW+EZCGJp3MQnuD+1A/tY6VkIS2lFbQIWhqdRQsgnwvdi4Aa9QGd7E3DU1IP+TLwdZCeXjup9zA0CzBCf9waZtAg+/7paKWoLQEvsA7y2Az7yjHnx8ebhxEa0NYYH71RruZi4BxoEon3RdhmNXdvE01GkhkFhTnGwZFINzZL9+wtZpGppKUlxpENlJBg7aa95YS9NW6fAHI4bN2zt1ljEzbLFCmNHCCnw/6f7bouuy1mZTnd3uVK+N+2d4F69Ejsnn1iQmzBNjmuNMJLlZKTnd2zdyTGrDC4MzZ1SgZ5Pe7u2bucsQknyHEFRx5QekZS9+yT3LEJO+S40igDlJmV0j375B6xCTvluIKjLJCmebtn70mOTRjTSI1x8nXrz07tnr03JfbEwD4txlE2KDeY0T37dIyTTnLmmTGOgqC8PK179lkZsQVj5v4YR+Iw0LdvoHv2fp80FJPPiXEyCRQUBLtnn+OXhmLTesY4JCoc4Ab36p59zxxpKGaeF+NoMGjYsN7ds8/rGVuwRkitksPBhai0pKB79v1g1Q9lLtHAGIcXN1FFxdDu2Q8uiE04T44rOKoATZ48snv2I4aASDq9OMbRZNCMc8u7Z19yZmzCODeNiXF0LmjO7Iru2Y8plYaE5Y6LcfJFa9hCqaA0w0OUqgZFXOsfgT4WZXSe/rFoFyX/FModcSLaSJvYPNpEW2k7Owqrx6mT2uk5RGcZ3UmX0620Gm+K6ZBci3fPJLxpy+hWlqq34+RyD96499Ae6E6jK2kLpTCv/gmtpFXKy7BahQyTDRdNwBvtenaOvgynqwPqb2kIzpoX0SLWpFfpN+i36PfTA9SpPKcfR0ZMw1Jm0x79c8Nr+lsIjxn0e7qDDrBbLE/gzT8N54NO5Y94961XalSmz9WPYQZ+ugRzUPFu3cO28RB6r6ePmJddrpSil/v0iL4TWhlUg3foetrCBrHR3G+YoY/V9+AM1oeWo9c7qJU24+6gv9AbzG44qt+vH0V66Y3TwEr440W2TYkevypaJBwNL/WiQrQspKfpWdrHAuyvfKHBbigwhA2X6vuRE/vTFMz2IVh+yL7lV+JeqTyjjtJLkLlX0c3C2/Q3epel4Ww6nk3lvfhCfpeyBO+03vLEMAfv/GvpdvT+DguxzdzO9yr3qY+qPxgzowf1ROxIkP6A75y/sgSsVGON7DfsFfYeL+Uz+R/4IeVW9WH1JVMdVn0eTjPX06P0LXOzoWwiO5c1sMvZanYzu4PtYfvYx7yYV/IL+RGlQVms/EUtwT1ZbVR/a7jGsNb4cbQqujP69+i3eoF+DU1EPFyF2f+e7sLKOmkvvY77AB1iBmZjibg15mdT2GW4r2TXs3vZRvYwa8co+9gh9gn7kn3NfuA40HEjT+d+no07wJfwS/it/E6+F/c+/hn/XvEo2UpIGaSMUKqVhZjVauUm3E8o76pp6l5Vh58L
DOsMGwwbDY8athuOGu2m35jJvPvH+47nHX8nStE10XXR1mi7/i5ydCpiKoN8eE4n4nxVhzPmcpxRH0Ccv8zs8F0ay2Mj2TnwzEx2AVvMlsOTV7P17AE598fYU/DSq+wI5pyALwcx5758EC/h43Gfx+v5Yn4Tv4W381f4McWk2BSHkqzkKaOVGqVeWaqsUNYpEWW38rZySPlG+RG3rlpVn5qtBtWQOlqdqS5T71I/Uj8yzDC8YPjAaDUuMF5j7DB+YRpsGmmaYJpoqjHdaNps2m+uRXTuoCfoz6emAnZQuUopV56gG/gANZW/yF9EPM+kOcpYjkjlG9kafgVr5zmG5cbhfDgbR0fVIHz9DN/Av+HDlbGsgk2mC3j/WG/GJPURkd/UHXRYfQprexE9Lzfa2ZX8iNFOrfhcKcSYf1P6qSHlBXpDOcBM6j30pmplHnaYP6RMQBT8RR1pqCK/cic9pixmV9ATHGnR+oP5OsTxOPYI8kIlK2DfKfhi5+MQRUOU9+i3dCF/jQ7jOV5Dt7E56ly6gQawy+kjehBPRS/DRcY8YzJ7ns9Tm3kP1k5cfRirK2Q5TDEk0dWsRllvPMJfxyl8r2qld5Q/YfZ7+WPKWPWoYRJrwBNwBV1Di/WraIWhSn2JzSWFTaVc9SCy2+VKgepHuRJZZQZy2mY83VuQB4qVsZB4ETnnIC6mIEOsx3078oSKCJqHZ3wastiL1G6s5B0015DIkHXwBfRCdBJN1x+kO/S5dJF+C/VBPlitX44eN9IHdCNtZKuil+G8n4Un5x12jmEU32sYpffhzfx1PpmvO31/4e1c5qVPcT+Gykh8Jzerr+J1U6Rfp/8D0X0GMuwdNIvOpvexys8xwhhlGw2IjuMt+ihlEdZ7gCbqD+k+ZqUGfT6+8Z+iB0wGqjOFsMcR9hLWexnV80n6UqU+Og9+uBFeCMNby5B/rg2XTqksDheNPHPE8GGFQ4cMGjigoH+//L59eofyep3RM5ibE8j2a76szIz0tFSvJyU5qYfb5XQkJthtVovZZDSoCmfUuzwwqlaLBGsjajAwZkwfUQ/UQVB3iqA2okE06nSdiFYr1bTTNcPQPP/fNMMxzfAJTebURtCIPr218oAW2VMW0DrY9IlV4K8vC1RrkcOSHyv5mySfAN7vh4FW7m0o0yKsViuPjLq4obm8tgzdtdispYHSemuf3tRitYG1gYt4AotamGckkwz3lA9rwZd+AiYVSQuUlUdSA2ViBhElt7xuTmTCxKrysnS/v7pP7wgrnR2YFaFAScQRkipUKoeJGEsjJjmMNk+shtZqLb23NV/X4aRZtSH7nMCcuhlVEaWuWozhCmHcsojn0ve9J6vo3F1atfrU1nSludw7TxPV5ubVWuTuiVWntvoFVlejD9jy3FG1zaMw9HVwYsVkDaPxVdVVEbYKQ2piJWJVsfXVB8qFpPYCLWIJlAQami+oxdakNUdo0gp/a1pauFM/SGnlWnNlVcAfKUoPVNeVZbQkUfOkFW2pYS319JY+vVucrphjWxIdccaecCpTf6JNclJdcBWTTniWiRkFzkJARLTZGmZSFcCahgqoH0rNs4dCDVc1g1VkDnZkXsRSWtvsHCbkwj5iyHUGtOavCREQOPzZ6ZK6uMSY6/yaBCvi5ESoob2Lj4RCkbw8ESKmUuwp5jhS1gf16X1xBw8EFjk1FHAfTYBv66qH5cP9fr/Y4LUdYZqFSqRpYlWsrtGs9FYK54eqI7xWtGzrakmeIlqaulpOmNcGEMnt8n+SkiPm4Il/DmdKj/KGYRGW8h+a62PtFZMDFROnV2nlzbVx31ZUnlaLtQ890RbnIj1Kq5R0Hud4uiJbEZQzTiiLSpU9oubin1EG9ZwOkxlRKSVMGxVx1o6JYbXV7++mUYd+VFjJ4qRZfJqRYaHT68NPq582PXuzggnjVVlROb252XpaG0ItNuBZ8QIRT5VVfq00QlPwZObiX4e+baig6vRIGC4rFQqIv5go
Xj1NMT3OV+MS0dmn9ygkuubmUQFtVHNtc12H3jQroDkDzZ18O9/evKi8titwOvQta9Mjo66rhq8a2DA8FJxKWgJszcSWMFszeXpVJz6ztTWVVa2c8dLakuqWHLRVdWpEYSnlQiqEoqKJClUwLLKVm6V+emeYqEm2qlIg67M7GEmZuUvGaHYHj8mcXTIOmRqThaVMXCLHlFZWnRo98pGsxmcdLO7CAXs6vlUclMlSw27Nx0rNGZlZmL3LmeUgs6dDj7bb7SVTwHzZbrNJ5ptwtj0BXFCzMF84IYFPsWhOJ9DqcAC9UtKhfxXuabcbp1jSfJnORGHqtCbAzGkX/Tk1pmEV0o7QZbswlYywBnMMw0rm23bRC5jvwrAHV5M1fIY35PwmJK+aEceBI+LVmsMAKhpxfISg/v1KV4QHK+kms9FsMKtm1ZjqTfNyo81qtyZYFWNySlJKjxTFmK54/MydCPCaM/wsxeryUyjEQqE8XFexmgEuf4EnxZPiTk7iiTyQ6y8YPGTw4EEDgz2DAf9d7PtHp19ZvbRx3KU371kVbWGFNz/Qv3zsbfPHbYruNmxJzjxnVnTvzoei0YfrCjYN7l/+yYMffpuXhbXfi/OL+E60UXs42WjIMptNJlJU4XyrJctGZhOCNJzvdA80VSpna1YtgVvTElQLF/6zSI9arGIjLN325bF2i+WERDr1aJdT7cPP9YbGOb8Kdbl1rPTrOOc3NWO/ev+kT92F+SOcwrVwSrI/TveqOT/epYR+/IdytWHLpmjRn6IJmzCj+xFd2WKFzN5JCVhMSo/kgaqSZbHebd1n5VYD5zYzdqYryMxdQWYWQWYRazNrJpOxQ/9crgnMl2GbWJTRKVaE+sFwns1mnGJkYj3GmqYElsBt0kM26SGb9JAt5iHhTyum8J9cFbZJX5lFr6dHX0rcUVoC0xImJNQmLEpQh1d7QzWLu2LxZDTWxCTwlEA4r2hEYU2+DEkWGuCC70AB4P3b+bHt248bDVuOP8inHxvF246PxUzXITby4DkD/SZsZxw+M5BZU5nawR8K+01ckUtU5BIVuUSl20HwzU8eKOPPPVAf1sT2XOy02Ot12/lLhi3H/rVJ5I3Z+keGtw378X2dzlLCFWkOluRMSkr3pKerqlNNsnls6erDns2JzyQqHo83nWuZYdf4HuM94bQqQ5VlmnOKa2aP6Z6Z3qlp09LXeu7gztQsRXFn2SzJXbGQ3BULySIW5BKTg5qJ4aH4SsrBfNwuVmsS4SEWCeaoXCSYT9vFBkplsUaT2NkisW5TWlMmy3RI/zmk/xyyc0dQuM8sBGQXAjLKEDBKZ6VmzJ5x4vGoGSuyzLiuTe4SUNHhosPY35rFVFNTs7iHk/wFqkgZaiA7hw9x0oACcg3kwUA2zWZr2OAX2KhH26Obt+6Nbtn4HMt89U2WvuKTm1+Mvsp3sQXsj9ujD7x1IHr3E8+x6U9Hv43uZQNZehuz/S76Afx/D56sTYgPL2XzYWG/25bI3IMzpvvONy/wqRanWLJZokliDiJfeiZBOEQw9i7G1sW4O/RDbe60gSiPtmX3HOgS9cyeA53x0hEv0f5aW2Yw1g59Z7wU7eGzwOQmnp1xtjbZNiNjQcYSy/LEFY5V1jWO2xIednQ4Pk78yOFMtNs1lyPJ5XK4HHaLO53701KsRnzLJNgNXoslxZOWmuURM46/b7aFk8VWeDzkzxbZkbxehyPRnNUVKlldoZJ1Im1kBRPvNIoAiaeN2FMg88VAmTmMwi3GGi1nUU5TjpKT7ZUB4ZUB4ZUB4f1fPlDxVGH8aaqIP1eB4Rt/LqfGAyf1fW/8beXEHc+todBxVArz3Z5C5vIUrk7sGzJc4dwpwip06kWiP5zQwlZz2FHocA5zuYdBVM0WQ9hJifo74bTUQld2aqEblBjOKHRmJ4F8oOTCeCfV4sWWgg9JowlvN0+PgNKX45UWcEEs328B/z28eefuS3e9PPaMKefoX22fctG0Pv6K
d9k9q9aNu+2+aD/DlvHPrbjzlczcnHHLootZ/6uvG2ozHV+mDBiyYnTDNeKvsalEpotFpPLLO8mhR4XTSqZw6e7EWFbCw9ehH483KCca5LMpjhG9BKcaY7lOIJfbpMqDhCKR2+Nm4nGXZp92MV/JERDv+9ttkBjA4J0BrhcFXb3cQW8hDXYVugd7z6LRrrPco71VNM1V5Z7mdd5uvt3BW4zi/BQe4GRpqaHkgYaB9jJDmb0iudJQaT83eY5hjv3C5KWGpfbLkh2GZLtCzG0mswMnNcRpkbhc2Moa5nIXFqaHsxTVYOBGE156VizXkpDocNjxGe9OTvF4vch0I9oM5NVEaXe7RBmenmy2aIQ3pcYoiTHyGszmrGRvUnKy1223WLKS3WDdLrvDoTldSU6ny22xm73JBofLaSeOKRkUr9PhsFjMZo45ed1ul4vMaR5PmrPYwiaSRnZgMihMBjZxs6YxxlJTO9jalljw1qSljj2e5j1+PC31uHdceX3Zhyci1hm/RbBifa4uKixcPbZvaPUVO1f39f60QOCtTnTu3AkYsbOLOxVYRcQxuSLiwoG11W314pEbOrQawlwI8yDsJBKnd6qI2CBJhKTNHjaEoVSN52RJTezsdvrlZwN6pHgGD0HhRtFjAAuwYE+jibG7opc9eyAnbaiVeT59aXwgo8+HO6IXPRl9oafJkxR93rDlx6Lbfv/PHOWd42nRz/61tl157NgoteY6rX70D/fhCN1JlcoZbUGvb99TSi86COJKr9ZQpq9T6alktg73hTuUQJs7ucBR3EcR+SRfogZcCHoctFURv8WYqYgzoRO4EtQEehy0FbQPZCQCilYNtBC0AXRQtCiZSkar5nMW91RSYZuKt4ND8dARkA5SyAfMB40HzQTdCNoAMko9IVkIWgnaCjoqW8KKp/WWAZi7p3WtLNoumF8gq3Wx6owaWW2bVh0rx06MlWVnxdSGxdT6D4yJ+5bEyp69Y6U7t6BJlNaEgm3FKUoKFpmCiS8CMr6THAh0H92tJFMExBVjXBJW3G05wYINWxWVmMIVRnPIp29TWGuCq6DYynV+hNzk45/zw7EWfrgt0VWwofhsfogeB20FKfwQ7nf5u7SSHxQ+BxaBNoC2gvaCjoCM/CDuA7jf4e+Qg79N+aAi0EzQBtBW0BGQib8NdPK3xDe+RMEXgbj47Qtqb2JZbwId/A1wb/A3MLWXW4cUFnRKJpQfZ3y5ccaTHmfcKQUd/KXW73shooLYaUTUk0o2jaQBSnZrbn9fh+JtHTHP18Hfa9NCvruL+/H9FAFxzGQ/Rt5PGmgCqBa0CGQE9wq4V6gJdBPoblAEhCgDOkEa3wXaDXqF+oHCoAkgM9/XimE6+N7WYImvOIW/yJ8lDzy+hz8ny938GVm+wP8my+dRZqHcxZ9pzfJRsQ3tBBsnSifKfLQb+F/bctw+vdjFt8J3PmA+qAg0HjQTdCPIyLfy7NY5Pjc6eZJ2mQmarfSJLB+ke80UvsAXDpYiADUBwWFnggNs0DYEeTi47g5UBQRvuAWcgODV14ETELz0KnACgvMvBicgOOcCcAKC02eCExAcXwkO0MHv+nNOT9+Q8RcyrdjBL4GXLoGXLoGXLiGVXyJu+l4Vc/tDa14ePLY+HOqV52vawpqeYk2TWNO9rKmeNV3Jmq5iTSNY03msKcSaMlhTFmsKs6Yn2VC4oomF20+rFoa9rGkXa9rEmhpZU5A15bKmHNaksSHhDu5vPWuALMpl0VYsHjqUZ45E9nFwPzzqR8z7kRO2AveCdFkLQ0nLjimnZokyuy2vKFbvO6xgYfEYvgOGO7ANO+gASMUG7UAY7UAnO9CBA1gEmgnaBjoC0kFGaGdj4jdKdADzQUWgmaCVoCMgo5zOERCnhfEpPi4nlh+f9HhR4ztwiz9i+bk/nOnMcIacY5QbM5gji43P0rP4EEoRv4lwu8yuDpaw+duE775NIEuxhd/Ab6RMbMRN8fLG1u8zfR3s
9tbgk77iZHYbZamIOlZIQZaLcig1yvogyjCLciBl8EdRFrRmTIWZozXY27eFJQqrzb7vM973fZLRwcF+nPGk71WtQ2Wtvn9A8uhm3/6Ma33P53eYIXkq2MFQbNGkamfGUN+mXVL1KjSsb/VdKYrNvisyRvsuzJAN9bGG8xpRCzt8k4LTfWPQX1nGLF+4EX1u9hVlnOcbEdMaJGw2+/phCqEYm4fJ9sqQgwayZIdThnSwhnBv0zpTlWm8abCpwNTb5Df5TJmmdFOS2W12mhPNdrPVbDYbzaqZ4xiTJM7LIXGKSzLKH2gaVfkDO8k7Ocmf1Mmf3XFm5nQ2RXooFbxicgle1ttmU8UsLfLN5EAHs06cHjEESljEXUEVlSWRoaGKDpM+KTIkVBExTTi3qoWxG6ohjfA1HYwqqzqYLkSr0sX/rXcSY65V16eL8oxV11dXkzfl4iJvkXukq3BU2c9AbRxPeft7T+MzI+sqJldFHsmsjhQIRs+sroj8Tvzneyf7kh0tL+tkX4iiuqpTGcm+LJ8k5MrIsurqig42VeqRxr6AHiLmC6lnxotZ6JFmzorprY/p5cIeejmigJ7FQrlSL9dikXoqE3otjTnlZS05OVLHo1Gj1Gn0aKfq7MqFTm6u1Elpol1SZ1dKk9CJjJQqGRlQycqQKiyNMqRKBkuTKlNPquTHVa49oXKtHElhJ3UyYjoJB7t0Eg5C59/PVb941ZfgFNY2vHr2DPGHi9pAeT2oNrL24gZvpGmWprXMro7/RSNYO2t2gyjr6iPVgfqyyOxAmdYyfMbPNM8QzcMDZS00o7yyqmVGuL6sdXh4eHmgrqy6bfSEgUNOG+vaE2MNnPAznU0QnQ0UY40e8jPNQ0TzaDHWEDHWEDHW6PBoORbJGJ9Q1WKmkmp8cMmyjdusiNfadH91SYpz0UgZvMP93ivTt+C0spFsoeqIPVASSQCJpj7FfYpFE54p0ZQo/joVb/JeOdyfvoVtjDc5IXYFSii0dFnjMvKWzyuL/WvEBdHSZcLhMQw1/tKFtvJIuK6scSnh5JyHk3MRTs4tJhOktWJJkWFdMputHF/dMWFfCIcJoaKcUBSyEUJmscQVf7r/y+Kl/Bxt4k+2sXAWW0qN1Uokq6KSIxVUxv8MsAVnKfF6aKzGAhtZiDV29RGfduxrVxRizV20dFmci/tiabyMWcKkscslJy7hrNAJjy1Fh+JSSGHiMigK4/IL6zPbNvrOrBNSoB4lC1n042Qlq/zNjA1oJzswgRKAiRId+OI+Tk584B4nF/BHHENdwB7kBiZRD2Ay8AdKoSSgh5KBXuAxfCF7wKdRKvh0SgNmSMykdGAWZejf4+grUKNMoB8H2+8pmzRgAPgd5ZAfmEvZwCDwW+pJAeAZlAPEdy4wT2KIeurfUG86A9hHYl/KA+ZTCNiP+gD7A7+mAuoLHED5wIHUT/+KBkkcTP2BQ2gAcCgN1P9FhRKH0SDgcIkjaDDwTBoCHElDgUVUqH+JL8xhwGIaDiyhEcBS4BdURmcCy2kkcBQV6UdpNIWBY6gYeBaVAM+WWEGlwHOoDDiWRulHaJzE8TQaOIHGACfSWfrnNEniZDobWEkV+mGaQmOBUyVOo3HAKhqvf0bVNAE4HXiYzqWJ4GfQZGANVQLPkziTpuj/pFqaCqyjacBZwE9pNlUD59B0YD2dCzyfZuif0FyJDVQDnEfn6R/TBVQL/kKJ86kOuIBmQX4RzQYulLiI5ugf0WKqBy6hucBGiUupQf+QltE84MV0AfAS4Ae0nC4ErqAFwEvpIuBlEi+nhcAraBHwSlqsv08rJTZRI/AqWgr8DS3TxW9BLgZeLXEVXaIfomtoOXA1rQCuoUuB19Jl+rvUTJcD19IVkFwHfJeupyuBN9BK4I10FfAm4EG6mX4DvIV+C/wdXa0foFsl/p5WAdfRauBttAattwMP0B10LXA9Nevv0B9oLfBOug74R4l30Q3ADXQj8G66CXgP8G26l24G
3ke3AO+n3wEfoFv1t+hB+r3+Jj1E64Ab6TbgwxIfoduBj9IdwD/RH4CbJD5GdwIfpz8CI3QXsAX4BrXSBmAb3Q1sp3v11+kJuk9/jTZL/DPdD+ygB4Cd9CBwi8QnaSPwKXpYf5X+Qo8An5a4lR4FbqM/Af9Km4Db6THgDnpcf4V2UgT4N2rR/0HPSHyWWoHPUZu+n56nduAuegL4Am0G7qY/A/dQB/BF6gTulbiPtgD/Tk8BX6K/6C/Ty8CXaD89DfwHbQW+Qtv0v9OrEl+j7cDXaQfwDdoJfFPiW/Q34Nv0DPAdelbfRwckHqTn9b30Lu0CHqIXgO9JfJ92Az+gPcAP6UXgR7RPf5E+lvgJ/R34Kb2k76F/0svAzyQepv3Az+kVfTcdoVeBRyV+Qa8Bv6TXgf+iN4BfSfya3tJfoG/obeC39A7wO+Au+p4OAI/RQeAP9C7wR4nH6T39eYrS+0CdPgD+N6f/38/pX/zKc/o/u53TP/mFnP7JT3L6x7+Q0z/6SU7/sBs5/f0TOX3JaTn9vV/I6e/JnP7eT3L6IZnTD52S0w/JnH5I5vRDp+T0d3+S0w/KnH5Q5vSDv8Kc/vr/o5y+/785/b85/VeX03/t5/Rfb07/pXP6f3P6f3P6z+f05379Of1/ABquEH0KZW5kc3RyZWFtCmVuZG9iago5IDAgb2JqCjw8L1R5cGUgL0ZvbnREZXNjcmlwdG9yCi9Gb250TmFtZSAvQXJpYWxNVAovRmxhZ3MgNAovQXNjZW50IDkwNS4yNzM0NAovRGVzY2VudCAtMjExLjkxNDA2Ci9TdGVtViA0NS44OTg0MzgKL0NhcEhlaWdodCA3MTUuODIwMzEKL0l0YWxpY0FuZ2xlIDAKL0ZvbnRCQm94IFstNjY0LjU1MDc4IC0zMjQuNzA3MDMgMjAwMCAxMDA1Ljg1OTM4XQovRm9udEZpbGUyIDggMCBSPj4KZW5kb2JqCjEwIDAgb2JqCjw8L1R5cGUgL0ZvbnQKL0ZvbnREZXNjcmlwdG9yIDkgMCBSCi9CYXNlRm9udCAvQXJpYWxNVAovU3VidHlwZSAvQ0lERm9udFR5cGUyCi9DSURUb0dJRE1hcCAvSWRlbnRpdHkKL0NJRFN5c3RlbUluZm8gPDwvUmVnaXN0cnkgKEFkb2JlKQovT3JkZXJpbmcgKElkZW50aXR5KQovU3VwcGxlbWVudCAwPj4KL1cgWzAgWzc1MF0gMzkgWzcyMi4xNjc5NyA2NjYuOTkyMTkgMCAwIDcyMi4xNjc5NyAwIDAgMCA1NTYuMTUyMzQgMCAwIDc3Ny44MzIwMyAwIDAgNzIyLjE2Nzk3XSA1OCBbOTQzLjg0NzY2XV0KL0RXIDA+PgplbmRvYmoKMTEgMCBvYmoKPDwvRmlsdGVyIC9GbGF0ZURlY29kZQovTGVuZ3RoIDI2NT4+IHN0cmVhbQp4nF2RTWuEMBCG7/kVc9welmi6snsQYdcieOgHtf0Bmow2UJMQ48F/33xsLXQggYd538nMhNbtU6ukA/pmNe/QwSiVsLjo1XKEASepSM5ASO7uFG8+94ZQb+62xeHcqlGTsgSg7z67OLvB4Sr0gA+EvlqBVqoJDp9157lbjfnGGZWDjFQVCBx9pefevPQzAo22Yyt8Xrrt6D1/io/NILDIeeqGa4GL6TnaXk1IysxHBWXjoyKoxL98kVzDyL96G9Ts5tVZdrpUkZpEdaRHlqhJVEQqWKJronN85V4v/62+N8POUcYuqdLprk750F5Y4z47X631Y8ddx3nDpFLh/h1Gm+AK5wck/4erCmVuZHN0cmVhbQplbmRvYmoKNCAwIG9iago8PC9UeXBlIC9Gb250Ci9TdWJ0eXBlIC9UeXBlMAovQmFzZUZvbnQgL0FyaWFsTVQKL0VuY29kaW5nIC9JZGVudGl0eS1ICi9EZXNjZW5kYW50Rm9udHMgWzEwIDAgUl0KL1RvVW5pY29kZSAx
MSAwIFI+PgplbmRvYmoKeHJlZgowIDEyCjAwMDAwMDAwMDAgNjU1MzUgZiAKMDAwMDAwMDAxNSAwMDAwMCBuIAowMDAwMDAwNDUwIDAwMDAwIG4gCjAwMDAwMDAxMDcgMDAwMDAgbiAKMDAwMDAxMDExMCAwMDAwMCBuIAowMDAwMDAwMTQ0IDAwMDAwIG4gCjAwMDAwMDA2NTggMDAwMDAgbiAKMDAwMDAwMDcxMyAwMDAwMCBuIAowMDAwMDAwNzYwIDAwMDAwIG4gCjAwMDAwMDkyMzkgMDAwMDAgbiAKMDAwMDAwOTQ2NiAwMDAwMCBuIAowMDAwMDA5Nzc0IDAwMDAwIG4gCnRyYWlsZXIKPDwvU2l6ZSAxMgovUm9vdCA3IDAgUgovSW5mbyAxIDAgUj4+CnN0YXJ0eHJlZgoxMDI0MgolJUVPRg==", +} +BINARY_IMAGE = ( + b'GIF89a=\x00D\x00\xf7\xa8\x00\x9a,3\xff\xc0\xc0\xef\xc0\xc0uXg\xfc\xf9\xf7\x993\x00\xff\xec\xec\xff\xa0\xa0\xe5\xcc\xbf\xcf\x9f\x87\x0f\xef\xef\x7f\x7f\x7f\xef\x0f\x0f\xdf\x1f\x1f\xff&&_\x9f\x9f\xffYY\xbf??5\xa5\xc2\xff\xff\xff\xac\x16\x19\xb2&\x00\xf8\x13\x10\xc2& \xdf`PP\x84\x9b\xf8\x03\x00\xb5\x0b\x0c\xdf\x0f\x00>\x9a\xb5\x87BM\x7f`P\xd2\xa5\x8f\xcc\x19\x00\xa5,\x00\xec\xd9\xcf\xe5\x0c\x00\xeb\t\x00\xff\xd9\xd9\xc7\x0c\x0c\x0f\x0f\x0f\xffyy~MZ\xfb\t\x08\xe5M@\xfb__\xff33\xcf\x90x\xf2\xe5\xdf\xc3\x06\x06\xbf\t\x08\xff\xb3\xb3\xd9\xb2\x9f\xff\x06\x06\xac)\x00\xff\xc6\xc6\x0c\t\x08\xf9\xf2\xef\xc9s`\xb8#\x00\x9f/\x00\xff__\xff\x8c\x8c\xc5\x1c\x00\xdf33\xffpp\xcf\x19\x19\xc0\x13\x10\xbf\x90x\xf7YY\xff\xf6\xf6\xe7??\xd7&&\xefLL2& \xdf\xbf\xaf\xbf\xbf\xbf???\xc5M@cn\x81_\x00\x00___\xcb00\xd8\x13\x00YC8\x80\x80\x80\xf3RRsVH\xc490\x10\x10\x10\x917@\xf2\x06\x00\xcf@@\xca\x86pooo\xa3!&\xc1\x1d\x18\xcf//\x1f\x1f\x1f\xdf\x00\x00\xd2\x16\x00\xcb\x90x\xbf\x1f\x00\x19\x13\x10\xf3\xd0\xd0\xe399&\x1d\x18Yy\x8e\x8f\x8f\x8f\xff\xa9\xa9\xcb\x13\x13\xbf00SF@\xb6& >\x1d\x18\xfb\xdd\xdd@@@\x99\x93\x90\xff\xbc\xbc\x7fPP\xaf\xaf\xaf\xc6VHzsp\x93& \xb7pp\xb3\x86ptPP|pp\xafOO\xd0\xd0\xd0\xef\xef\xefL90\xbc\xa9\xa0o0(\xeb\xb0\xb0\xff\xe0\xe0\xff\xd0\xd0\x870(K0(\xc9|h\x9f__lct\xebFF\xcf\xcf\xcf\xe0\xe0\xe0b& \xff 
},(@0(\xa9\x93\x88\xa6|h\x1f\xdf\xdf\xd5\xac\x97\xe2\xc5\xb7\xc7`POOO\x9cyhppp\xff\x80\x80\xff\x96\x96\xd7``\xcc\x99\x7f,\xb0\xcf\xbf\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\xffff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00!\xf9\x04\x01\x00\x00\xa8\x00,\x00\x00\x00\x00=\x00D\x00\x00\x08\xff\x00Q\t\x1cH\xb0\xa0\xc1\x83\x08\x13*\\\xc8\xb0\xa1\xc0\x1b\x07\x0c8\x9cHq\xa1\x89\x14\xa72F\xac\xc8\xb1\xa2\t\x1f\x19Cj\x94\xd8\xb1$B\x03\x07D\xaa\x1ci\xb2%*#3V\xcad\xe9\xb2\xa2\x9d 3s\x9e\xdaX\x93!"\x8c:\x83\xf2\xeci\xf0c\xd0\xa3!\x87\x12E\x89\xb4iR\x92.a:\x9d\xfa\xb4\xe5\x0c\x9cT\xb3\xee\x84:\xf1\x06P\xad`\x95*4\n\xb6l\xd5\x84\x06>\x99]\x1b\xb2\xc5\x9c\x83F\xda\xb0\x9d{\xe4\x84\x00\x83W\xe7\xaeM\xe2f\xd4\xa8\xbb\x03\xbd\xea5kE\x88_\xbf\x80\x0fy\x1a\\\xb6\x08\x92\xc3\x87\x01\x070\xe5\x00\x02\xe3\xa9-\x80\xc4\x80\x1cY\xe0dS\x94-_\x0ezd3\xe7\xce\xa8>\x83\x0e=Zf\x92\x13\xa7Gm\x18 
\xe1\xaf\xe7\xd5\xb8+\xb7\xceX8\xf6(\xda\xa2D\xd9N\x8d\xbb\xb8n\xc6\x8e}\x8f\xfa\x12<\xf8\xf0\xcf\x11\x1a\x14\x07}|mf\xdf\x00\x9elP\xd1\\\xb8dSaJ\x95\xffz }zu\xadiLs\xa6\xb0&8\x80\x01\xdd\x9f\x9b\x8a ^<\xf9\xe9\xac\xa9:\x82\x1d{\x83\x84\xe6\xef\xc5\xf7\x1d}\xf5\xd9W\x9eq\xa2\x1d\x95\x84a\xb1\xa9\xb0\x01\x00\xdd\x05\xd8\x9c|\x04\x16X\x8a\x02\x0b0\x80\x9f\x0b=\xe8\x94\\l\x1et \n\x00\x10\x02\x08\xdf\x84\x03ZX \x86\x1a\x16W\x03\x87+]\xe7[\x06\x00\x96\xe8\xde\x89\xce\xa5\xa8\xe2\x8a\x19N\xf7b\x87\x19\xa5\x17\x1b\x05\xa3P\x10\xa1\x8d#\xe2X\x9b\x8e;\xf2\xd8"n/\xd6\xd5\xdf\x13\xa2x\x80$\x89\x11\x9e\xd8\x81\x16\x146\xb9#\x8b\xd3\xf9\xe6\xc1\x7f\xa2\x0cp\xe5\x99\x12\xa8\x80\xdad\x15zi!\x98\xab\xf9Ff\x99gvG$g\xdf1\xa0\x80\x9bM\xc2\t\x19\x00\x19p\xd9\x9d\x99G6\xd7Hl\xdf\x99\xc2\xc8\x9e|~\t\x88)~Q@c\x99\xa3\x0cZg\x06\x00\xf8\x96\xa8)\x0c,\xc0h\xa3\x05^\x02\xe9(\x93Rji\x84\xcb)\'\x1fn\x9d~\nj)\xa3\x0e\xffZis\x84\x06\xd7\x81\xaak\xae\xc6\x01\x07\xa0\xb5\xfa*\xac~\xc9z\xaa\x04\x03l\x80+b\xb7\x81V@\x01$\xac\xd6\xe9\xab\xb1\xd2:kpj\x0ep\xe7\xb1\xab\x9aRA\x01!\x14\xd7\xc0\x03\x8dF\x1b\xdc\x00\xd3\x8ar-\xb6\xc8\x12\x07Z\t\x15\xf0:\xdd\xb7n\x8ak\xaa(\x1ddz\xac\x14\x86\x80\x92+~\xf8\xc1\xbb\xa3\xbc\xe4\xae\xe1\x01\xbaR\xfcAG\'\\\xa4\xab\x1a\xbf\xef\x82k\xa1\xbc\x03\xa3\xeb\xd7\x1d\xa4T\xcc\x87\xc2\xc5qP\x02\xc3\xab\xf9+\x9e\xb8OH\xec\xd7\x1bYTL\x8a\x1f~\xa1\x91\xecj"\xd8\xc01n\xfe\x8e\xdaA\x06\xe7\xa2;\t)Q\xb0AJ\x15\\\xa8\xbc2h!\x14\xe0\xee\xcb\xa05\x10\xc6\xa8"s&\x07\n\x13L\xb0sA\x0b\x9b\xa2\x81\x08"h\xf02\x0f\x15\xe0\x964g2\xa8\xd1D\xd3\xa4\xe8\x01\xf5t\x1c\x14`\xc6\xcb\xcbN\x11\xe7\xd6\x87]@\xca\xd7\x8f\x90\xf2\x01\x08#\x10t\x80$\xc5\x99\xc1-\xc7?\x14\xff@\xc6\xdal\x8f\xe2\x04)b0\xb1\t)}\x84\x12J&\x04\x05\x02\xc5\x18\xb8\xd9P\xc0\x0f\x1c\x93`5h\x81_\xb0H(j\x98\xacD( \xc0`P\xc5\x8f\x83\xa6\xc1\xb6;l1\x9d\x06\x1bk\x9d4\x18:(\x1e\n\x15&sR\xb7A9\xc0Q\xf1 \x18X\x00Z\xdf<\x84\xa0:h$H^\x1cgC\\\xa0\xdc\x10\x9a\xc8\xae8\x11gdQ\x07\x01\x07!\x10\n\x11W| {\xef\xa6\x90\xb0m\x01"T 
B\x01<\xa8\xed\xba_X|pE\x1e\xa7\xc9\xe0D\x19\xce\xcb\xbe\x04\xf5\x08\x11\x80@\x02\xf1+\xce}\t!\xecP\xc1\x0ed\xb8\xdc\xf9\x86\xa0\x88\x8aQA\x06\x90\xc1\x02\xfc\xf2G\x83\x1c4\xc4~\xf8\xcb\x1f\xf7^v\x98D\x98\x0c\x07\xca\x1b\xc5\x05\xba\x90\xbfP`Bt\x14\x81`\x07\'\xc8/\xbf\xc8@\toC\x01)\x9c\x00\xbb\x0e\xd2\xcd$"\x94\xa0\xef\xf0\xe3\x978\xe0l\x02^ \x05\x07\xf3\x97\x00\x04\xd0\xaf%1t\xde\x0b|X\xb0\x820\x8db\x0f\xa4`\xc2\x04\x16@\x8a\x0e\xce\x8f(\x02\t\xa2\xec\x86X\xc4\xb5\x15"\x898\xc4A\xfc\x1a\x08\xc5\x82HQqT\xc4\xdc("A\n<\x08\x02\x05\x94\x90\x1d\r@\xd8E\x83|1\x14T\xbc\x80\x0e>@\n\x14\x88An\xa0\xbb]\x1b\x13\xf2F\xd9Y\xc2dg\xe8\xe1\x1e\x1d\xd2\xc7P\xa0\x10\x07\x84\xf8\xe1 \x1fx\xbf\xfc\x11\xa1\x12\x90XdG\x82\xb8FI\x02q\t/\xb4\xa4&[\x12\x10\x00;', + "png", +) +ARRAY_TO_BASE64_IMAGE = ( + "data:image/png;base64," + "iVBORw0KGgoAAAANSUhEUgAAAD0AAABECAIAAAC9Laq3AAAIzElEQVR4nNXab0wb5x0H8C8x8R9ixCmuCLZi5dIlJi+gPg2kAC+KSaaJpXFLm7XJQiU7SkervcjopiqaFAXTok1tOsVkb5JmUY3UhiRSJ1YzGtGRXF4MO1OuMsMv4MKUs2CGWLg6zwRjC5S9OOq/5/NfEu37Ah333D334Xh+D8fjq3j69Cn+D7Nty6/gcmFoCMFgeXut2ML7zbJwOBLitjYcPQqNpix9b42bZeF0gmVFmsqkL7c7GMToKCYncxxWsr587kgEExNwOgs4pQR9mdwTExgdxepqMecWpS/ZPTWFmzfLMF0UqC/BLVF8RSdvfVHuPIuv6OShL9BdRPEVHUl9Ie7RUUxMFFl8RSeLPj+3ywWns+x/qwtIhj6XeyuKr+gk6bO7g0HcugWP51nCcmY9GsX585Uvvlgp0hiJwOnExMQzV+XI0uwsxzAHTp0iRNzPpfhyhff751yulaQCS3I/9+ITy8ry8pzLxS8upu2vBACfDw4H/P7n4MqetXCYY5ilLFNCJQBwHGw2GAxoakJ19TPViWU9Gl3wehemp9djsWzHJI0TlgXLPnf90uzsnMslIRaSUZfPT8/7/TM0vbayktm0ukNNm7tpc/cn3S8Le8TmQTxrfbbiEzJ24l3a3B3ZkcLI4hay9Xrp4gOwsNfwzYn3MvenuOn2dpLjSJ8v5ZCt0QvFxzGMaOvDhqb7h15949qFhw3Nogck3B6jsYOmAVgcDpvNtqX6helpjmFEiy9Yq/3q9AfTBzsAHLzzddrwiCex7sMThLAxZLXu5Tjr559ze/akH86yGB4GTSMcLk68zHHu69ezzRirO9QfX7wpoKWTdb2q7Hre7/c4nd7xcdEZ46755OoO9X/21me7wWmRrEtgyGod6erqtdt77XYiFEppE0ZOUxMaGqBQSHQiXXzuQ+ZvTrz3fa1u96PZfMRCcq8Phgii32YjOc7W18fX1KQ3MwyGh8EwiEYzz12PRjmGcQ8PZ0MPDlz98syH39fq8hfn6xYipY/FRPUL09Pu4WHRGSNYqxW+zmWZLkSjepIYloWtx+apX5qdzVZ8qzvUX5zpt3025j5kLug27wz43750vkh3nvqZe/dEi899yGz7bOz+oVcB5Ine732ge
hJ+49qF/p5XXrpPl+TOrc+Sv5z+IM/pQsjOgH+/l/mk++UO5/W0poSb8nhqeD7/ToXk1D9saBocuPqvgyYABaFNzi81AfEnFiS7iVDI3ttbBB1J+pHXXovvDNZqBweuXhr481xD88Le+vx72+d9cObcO8eufSpxTMo4sQ4NcSTZZ7MVre+12+PffnHmw4KmCwD7vczZ94//+twv93vFn1viSR/fRChk6+8vWu8jyfh2QWhhKAPY/SivtZp0N1cDrqZUfUFRPQn/7Mbls+8fL+isdPf4Pozvg18NpN77MiETUT0J7/cygvjIjStVT0TmTYmku7VhAFhMqntB/4gkLQ5HidbkvHT/LoAjN65ITBoSSXe3zkMbhiZj2Yf0+RynTpVFvzPgP3PunTy5aopqGBmps1rT9qe7X4jAzIIMQTQl6hvv3+2+dL6/55Wc04UQNUX91WR6026/QhCEySTlzidF6HcG/AB6/vCbljsFrPmPkuSA3U7TtN1uX6Ko5CZxN1eDZVWOTvPXH7zzdUHczVDUIE3Hv5vgOGGjkiCQzT2pxz0yr84l9DsD/n3eB7aeI29f6itMDAC4s77G87zFYrl48SIANUURJlOzx6M2GrG5/n3vHlJHD6MFo8NP57IOdNFwe/bwBEFNTdFFMDPSp6+b+m+E53kAFRUVNputry/x84vf74YA1FFM6hGV5b6AwwinAQBIn4+amiqHGVAplwAqaUzHwnxyu7hbsYG2eawo4Nqd+xKxSixWY7Y87zlsRqavY+eXhG2PxwNge5Cbvm4Psh5h5zYAaG+Hw4GkRwsAZAiGZbAvgNHmuEbDYwCI5fGbyT+yehIAx3E0TdtsNgDNBjK2wnP0yPzkbST+n7dYpijqIkXZgDjf5EOwCowOURnaFrJeo20BJA9NpExiA6l4q1Om32XwzLA+X0dHB4AfG0itpkauJkhTV7WORPI6hNFoHAKGAAsQ1x9lMf4jeHchrEDbPKoz1mqiMoTl0BX2cCGebfo65VudMsPmck2TgYwPlV8d6yRNXRoDFT848XlaLMyf/PnrX43TAI62Un+qJ7VOWhHkAUzuhncX5OtoDMAQTOj9arj0CFahJ/XPH50KqtAQ2zTEBstlE1doCIXZtL3VmLwzHIme/OhyZAMff2Q73fOuTK5MOUVw+xl6kaHDkejopEddpTT/0IXGNSXo/Wowus3nLXUU1TGE5VhRQL6O1gXUp34olOze3kp9W0+urK4dA6K3bqeTVUr5T1rkh1sqVCIrRxoDpW/rTBOnuDdia4+n3YFp90ZsTeT8H/TLKvgILFchJoN8A7owDEEoNtKPj7srNMQFfd3fPDMAfnG4pWfSg0ii/+2tlOJ4p6i4WkuSpi55NZHZlOIWkqc+W1+Zbjd14HeeGWFbrVKO6euE0SIzkEpr1zaNyP/RKk2dvrVTKD6JiHxeXLp+061S/lZf9x3Ltbe3ezyeUCj0D7Np3TOTXHzJkasJXbMpufgKc5euF9wRA3mE5SwWi8Ph6O3tHRwc/Ofve0XvsUyurG1s2dXYIjqURZN1PVYmV+qaTLsaW0T1wVYjTx2onXDX/t1dGRH5wQD8GwBgtVoBEMJDnBhaoviKcefUb6gUi0fbA4dbsunnqhIUnufVqnRZzuIr3l2KPry6Joh5nnc4HM31ZLJY22TKWXwSKfj9KolxL4tEBb1LX6cwm8aCfL9jpKamhiAIn8/XZ+0ytxoLKr5yunPq42HnH58cuCxsazXE2KdnaxtbdE2m4qBpKen9wZz6nj8OfcdyapVyxHHZ1HW80OKTSBne15TQhyPRgIw4aD6xJ/PDrdJStvdjM/WlF59Eyvw+8kZsbX7ydtjPlaX4JLKV761vZf4H0dLrJY2D0p4AAAAASUVORK5CYII=" +) +BASE64_MODEL3D = { + "name": "Box.gltf", + "data": 
"data:;base64,ewogICAgImFzc2V0IjogewogICAgICAgICJnZW5lcmF0b3IiOiAiQ09MTEFEQTJHTFRGIiwKICAgICAgICAidmVyc2lvbiI6ICIyLjAiCiAgICB9LAogICAgInNjZW5lIjogMCwKICAgICJzY2VuZXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAibm9kZXMiOiBbCiAgICAgICAgICAgICAgICAwCiAgICAgICAgICAgIF0KICAgICAgICB9CiAgICBdLAogICAgIm5vZGVzIjogWwogICAgICAgIHsKICAgICAgICAgICAgImNoaWxkcmVuIjogWwogICAgICAgICAgICAgICAgMQogICAgICAgICAgICBdLAogICAgICAgICAgICAibWF0cml4IjogWwogICAgICAgICAgICAgICAgMS4wLAogICAgICAgICAgICAgICAgMC4wLAogICAgICAgICAgICAgICAgMC4wLAogICAgICAgICAgICAgICAgMC4wLAogICAgICAgICAgICAgICAgMC4wLAogICAgICAgICAgICAgICAgMC4wLAogICAgICAgICAgICAgICAgLTEuMCwKICAgICAgICAgICAgICAgIDAuMCwKICAgICAgICAgICAgICAgIDAuMCwKICAgICAgICAgICAgICAgIDEuMCwKICAgICAgICAgICAgICAgIDAuMCwKICAgICAgICAgICAgICAgIDAuMCwKICAgICAgICAgICAgICAgIDAuMCwKICAgICAgICAgICAgICAgIDAuMCwKICAgICAgICAgICAgICAgIDAuMCwKICAgICAgICAgICAgICAgIDEuMAogICAgICAgICAgICBdCiAgICAgICAgfSwKICAgICAgICB7CiAgICAgICAgICAgICJtZXNoIjogMAogICAgICAgIH0KICAgIF0sCiAgICAibWVzaGVzIjogWwogICAgICAgIHsKICAgICAgICAgICAgInByaW1pdGl2ZXMiOiBbCiAgICAgICAgICAgICAgICB7CiAgICAgICAgICAgICAgICAgICAgImF0dHJpYnV0ZXMiOiB7CiAgICAgICAgICAgICAgICAgICAgICAgICJOT1JNQUwiOiAxLAogICAgICAgICAgICAgICAgICAgICAgICAiUE9TSVRJT04iOiAyCiAgICAgICAgICAgICAgICAgICAgfSwKICAgICAgICAgICAgICAgICAgICAiaW5kaWNlcyI6IDAsCiAgICAgICAgICAgICAgICAgICAgIm1vZGUiOiA0LAogICAgICAgICAgICAgICAgICAgICJtYXRlcmlhbCI6IDAKICAgICAgICAgICAgICAgIH0KICAgICAgICAgICAgXSwKICAgICAgICAgICAgIm5hbWUiOiAiTWVzaCIKICAgICAgICB9CiAgICBdLAogICAgImFjY2Vzc29ycyI6IFsKICAgICAgICB7CiAgICAgICAgICAgICJidWZmZXJWaWV3IjogMCwKICAgICAgICAgICAgImJ5dGVPZmZzZXQiOiAwLAogICAgICAgICAgICAiY29tcG9uZW50VHlwZSI6IDUxMjMsCiAgICAgICAgICAgICJjb3VudCI6IDM2LAogICAgICAgICAgICAibWF4IjogWwogICAgICAgICAgICAgICAgMjMKICAgICAgICAgICAgXSwKICAgICAgICAgICAgIm1pbiI6IFsKICAgICAgICAgICAgICAgIDAKICAgICAgICAgICAgXSwKICAgICAgICAgICAgInR5cGUiOiAiU0NBTEFSIgogICAgICAgIH0sCiAgICAgICAgewogICAgICAgICAgICAiYnVmZmVyVmlldyI6IDEsCiAgICAgICAgICAgICJieXRlT2Zmc2V0IjogMCwKICAgICAgICAgICAgImNvbXBvbmVudFR5cG
UiOiA1MTI2LAogICAgICAgICAgICAiY291bnQiOiAyNCwKICAgICAgICAgICAgIm1heCI6IFsKICAgICAgICAgICAgICAgIDEuMCwKICAgICAgICAgICAgICAgIDEuMCwKICAgICAgICAgICAgICAgIDEuMAogICAgICAgICAgICBdLAogICAgICAgICAgICAibWluIjogWwogICAgICAgICAgICAgICAgLTEuMCwKICAgICAgICAgICAgICAgIC0xLjAsCiAgICAgICAgICAgICAgICAtMS4wCiAgICAgICAgICAgIF0sCiAgICAgICAgICAgICJ0eXBlIjogIlZFQzMiCiAgICAgICAgfSwKICAgICAgICB7CiAgICAgICAgICAgICJidWZmZXJWaWV3IjogMSwKICAgICAgICAgICAgImJ5dGVPZmZzZXQiOiAyODgsCiAgICAgICAgICAgICJjb21wb25lbnRUeXBlIjogNTEyNiwKICAgICAgICAgICAgImNvdW50IjogMjQsCiAgICAgICAgICAgICJtYXgiOiBbCiAgICAgICAgICAgICAgICAwLjUsCiAgICAgICAgICAgICAgICAwLjUsCiAgICAgICAgICAgICAgICAwLjUKICAgICAgICAgICAgXSwKICAgICAgICAgICAgIm1pbiI6IFsKICAgICAgICAgICAgICAgIC0wLjUsCiAgICAgICAgICAgICAgICAtMC41LAogICAgICAgICAgICAgICAgLTAuNQogICAgICAgICAgICBdLAogICAgICAgICAgICAidHlwZSI6ICJWRUMzIgogICAgICAgIH0KICAgIF0sCiAgICAibWF0ZXJpYWxzIjogWwogICAgICAgIHsKICAgICAgICAgICAgInBick1ldGFsbGljUm91Z2huZXNzIjogewogICAgICAgICAgICAgICAgImJhc2VDb2xvckZhY3RvciI6IFsKICAgICAgICAgICAgICAgICAgICAwLjgwMDAwMDAxMTkyMDkyOSwKICAgICAgICAgICAgICAgICAgICAwLjAsCiAgICAgICAgICAgICAgICAgICAgMC4wLAogICAgICAgICAgICAgICAgICAgIDEuMAogICAgICAgICAgICAgICAgXSwKICAgICAgICAgICAgICAgICJtZXRhbGxpY0ZhY3RvciI6IDAuMAogICAgICAgICAgICB9LAogICAgICAgICAgICAibmFtZSI6ICJSZWQiCiAgICAgICAgfQogICAgXSwKICAgICJidWZmZXJWaWV3cyI6IFsKICAgICAgICB7CiAgICAgICAgICAgICJidWZmZXIiOiAwLAogICAgICAgICAgICAiYnl0ZU9mZnNldCI6IDU3NiwKICAgICAgICAgICAgImJ5dGVMZW5ndGgiOiA3MiwKICAgICAgICAgICAgInRhcmdldCI6IDM0OTYzCiAgICAgICAgfSwKICAgICAgICB7CiAgICAgICAgICAgICJidWZmZXIiOiAwLAogICAgICAgICAgICAiYnl0ZU9mZnNldCI6IDAsCiAgICAgICAgICAgICJieXRlTGVuZ3RoIjogNTc2LAogICAgICAgICAgICAiYnl0ZVN0cmlkZSI6IDEyLAogICAgICAgICAgICAidGFyZ2V0IjogMzQ5NjIKICAgICAgICB9CiAgICBdLAogICAgImJ1ZmZlcnMiOiBbCiAgICAgICAgewogICAgICAgICAgICAiYnl0ZUxlbmd0aCI6IDY0OCwKICAgICAgICAgICAgInVyaSI6ICJkYXRhOmFwcGxpY2F0aW9uL29jdGV0LXN0cmVhbTtiYXNlNjQsQUFBQUFBQUFBQUFBQUlBL0FBQUFBQUFBQUFBQUFJQS9BQUFBQUFBQUFBQUFBSUEvQUFBQUFBQUFBQUFBQUlBL0FBQUFBQUFBZ0w4QUFBQUFBQU
FBQUFBQWdMOEFBQUFBQUFBQUFBQUFnTDhBQUFBQUFBQUFBQUFBZ0w4QUFBQUFBQUNBUHdBQUFBQUFBQUFBQUFDQVB3QUFBQUFBQUFBQUFBQ0FQd0FBQUFBQUFBQUFBQUNBUHdBQUFBQUFBQUFBQUFBQUFBQUFnRDhBQUFBQUFBQUFBQUFBZ0Q4QUFBQUFBQUFBQUFBQWdEOEFBQUFBQUFBQUFBQUFnRDhBQUFBQUFBQ0F2d0FBQUFBQUFBQUFBQUNBdndBQUFBQUFBQUFBQUFDQXZ3QUFBQUFBQUFBQUFBQ0F2d0FBQUFBQUFBQUFBQUFBQUFBQUFBQUFBSUMvQUFBQUFBQUFBQUFBQUlDL0FBQUFBQUFBQUFBQUFJQy9BQUFBQUFBQUFBQUFBSUMvQUFBQXZ3QUFBTDhBQUFBL0FBQUFQd0FBQUw4QUFBQS9BQUFBdndBQUFEOEFBQUEvQUFBQVB3QUFBRDhBQUFBL0FBQUFQd0FBQUw4QUFBQS9BQUFBdndBQUFMOEFBQUEvQUFBQVB3QUFBTDhBQUFDL0FBQUF2d0FBQUw4QUFBQy9BQUFBUHdBQUFEOEFBQUEvQUFBQVB3QUFBTDhBQUFBL0FBQUFQd0FBQUQ4QUFBQy9BQUFBUHdBQUFMOEFBQUMvQUFBQXZ3QUFBRDhBQUFBL0FBQUFQd0FBQUQ4QUFBQS9BQUFBdndBQUFEOEFBQUMvQUFBQVB3QUFBRDhBQUFDL0FBQUF2d0FBQUw4QUFBQS9BQUFBdndBQUFEOEFBQUEvQUFBQXZ3QUFBTDhBQUFDL0FBQUF2d0FBQUQ4QUFBQy9BQUFBdndBQUFMOEFBQUMvQUFBQXZ3QUFBRDhBQUFDL0FBQUFQd0FBQUw4QUFBQy9BQUFBUHdBQUFEOEFBQUMvQUFBQkFBSUFBd0FDQUFFQUJBQUZBQVlBQndBR0FBVUFDQUFKQUFvQUN3QUtBQWtBREFBTkFBNEFEd0FPQUEwQUVBQVJBQklBRXdBU0FCRUFGQUFWQUJZQUZ3QVdBQlVBIgogICAgICAgIH0KICAgIF0KfQo=", +} +SUM_PIXELS_INTERPRETATION = { + "scores": [ + [ + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 
0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, 
+ 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 
0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, 
+ 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 
0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, 
+ 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 
0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, 
+ 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 
0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.9217332561281606, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.8478093032233159, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 
0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + 0.7775525960239336, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 
0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 
0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 
0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 
0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 
0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, 
+ 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 
0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.28228141285466124, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 
0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.7110596409959468, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.5717043041883806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 
0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 
0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 
0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 
0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 
0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 
0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 
0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 
0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 
0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 
0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 
0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + ], + [ + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.17004439297432927, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.6232387569967188, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.349160393746381, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 0.37415556842308434, + 
0.37415556842308434, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 
0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 
0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 
0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 
0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 
0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 
0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 
0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 
0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 
0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 
0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 
0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + [ + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.4147847905809689, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 
0.21617448369040726, + 0.21617448369040726, + 0.21617448369040726, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.4393939393939394, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + 0.8667245705462266, + ], + ] + ], + "alternative_outputs": [ + [ + [1793106], + [1795539], + [1797837], + [1800021], + [1815417], + [1802088], + [1806420], + [1824192], + [1818906], + [1804818], + [1813338], + [1812561], + [1811298], + [1817472], + [1810533], + [1797249], + ] + ], +} +SUM_PIXELS_SHAP_INTERPRETATION = { + "scores": [ + [ + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 
0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 
0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 
0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 
0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 
1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.36599426908032084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 
0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 
0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 
0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 
0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 
0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.9044030984144017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 
0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 
0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 
0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 
0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, 
+ 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 
0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.5780729041010304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.03706410007949775, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 
0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 
0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 
0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 
0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 
0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + [ + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.4724172299368354, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + 0.5148839775509372, + ], + ] + ], + "alternative_outputs": [[]], +} + +FILE_TEMPLATE_CONTEXT = { + "file_count": "single", + "value": { + "name": "sample_file.pdf", + "size": 10558, + "data": 
"data:application/pdf;base64,JVBERi0xLjQKJdPr6eEKMSAwIG9iago8PC9UaXRsZSAoVW50aXRsZWQgZG9jdW1lbnQpCi9Qcm9kdWNlciAoU2tpYS9QREYgbTk3IEdvb2dsZSBEb2NzIFJlbmRlcmVyKT4+CmVuZG9iagozIDAgb2JqCjw8L2NhIDEKL0JNIC9Ob3JtYWw+PgplbmRvYmoKNSAwIG9iago8PC9GaWx0ZXIgL0ZsYXRlRGVjb2RlCi9MZW5ndGggMjM2Pj4gc3RyZWFtCnicjZDfakMhDMbvfYpcD2bzTxNhFFZYe90h7AG2tTDoYO37w9S1O1A4cIyo5Bc/80mALR6pLVYY3k/hJ/RMJh6J82d4e4Dvlo2WRu1tb6UEPV538Hc4H8NqJ3C8DAWnDIQpd4lD2LdYomzcZ9O+Km1qWG0VSCRKG+xQD4FuTZeWdTcR0CiZiqtAPYXOGKOhEBnUD3hC5M0a6lcoObInwdIErsAHcI+F3cknsB3ANFJCU54Byf6B8AAvdZi9s8WokcXNFrvLEj0n0gXu5Hm8TJyiK6nm+54Ipd3IXnQiae5H5vyxTf724RdvlHTtCmVuZHN0cmVhbQplbmRvYmoKMiAwIG9iago8PC9UeXBlIC9QYWdlCi9SZXNvdXJjZXMgPDwvUHJvY1NldCBbL1BERiAvVGV4dCAvSW1hZ2VCIC9JbWFnZUMgL0ltYWdlSV0KL0V4dEdTdGF0ZSA8PC9HMyAzIDAgUj4+Ci9Gb250IDw8L0Y0IDQgMCBSPj4+PgovTWVkaWFCb3ggWzAgMCA2MTIgNzkyXQovQ29udGVudHMgNSAwIFIKL1N0cnVjdFBhcmVudHMgMAovUGFyZW50IDYgMCBSPj4KZW5kb2JqCjYgMCBvYmoKPDwvVHlwZSAvUGFnZXMKL0NvdW50IDEKL0tpZHMgWzIgMCBSXT4+CmVuZG9iago3IDAgb2JqCjw8L1R5cGUgL0NhdGFsb2cKL1BhZ2VzIDYgMCBSPj4KZW5kb2JqCjggMCBvYmoKPDwvTGVuZ3RoMSAxNjgwOAovRmlsdGVyIC9GbGF0ZURlY29kZQovTGVuZ3RoIDgzOTM+PiBzdHJlYW0KeJztegl4VEX276m6t/dOeiFJd9a+nU4aSQOBsAaQdDZAI3uABIkkQCQoyBJQcCPOiGBwHweVccRd1EE7i0yCjjDAuCAIo4y7grg7Iui4ovT9/6q6wzLqvHzvfe97z++bezm/OnXqnFpOnXtuXdLEiKgHQKV+o8vKR7HBrDcR90I6bPSE8ZNXVmy4k0hZg3rz6MlTSqxPm64jYhHU+42fnF+wfOjmfdAX9dqpZWOrJtxywddoSiJy3Tp7Qd0idjf7Eu2VaJ8x++Kl2j0Zr/6TyH4OkbHy/EVzF+xeUb2eyH036hfNrWtcRF6yoP8R0HfOnb/i/LWfPPI+UaqTyFbSMGfB8ttq5/aAbhnI3FBfN+dg0jPojx2E/uAGCNwDLCrqmCPlNCxYurzv++ptWBzmQ5/NXzi7LrV3+h6sB/3R8gV1yxcZ1iU0QR/zIe2iugX1ntr+bxMZUGVlixY2LtXzaB34+aJ90ZL6RbmvjN2KrrEe29OQKWQmTi5iug5e+HI4fUkj6I9kgtxJ+TQVo/8JugbUFZKX3lP0+TMX7E0jo+Oo1EnHHj92qVNKTruGS4mV+uI21C2pm0Xa7BVL5pM2d0n9haQ11M9aQtr8uqUXkXayTzKkrn94ZvmKmY4RX5vTzVJ873s980T5woThm489fnyuk8x2VC0nRhSlPc5zrCYm60lnAEO4GdaWDyzAzWgQbkbDcLO4BcnVJsW9koT4GoMyUfrLSOWonUPjaRJNg+eIyk6t6++dvH/iAUVZw26CN82G9YYBmFJ6rFT+Tudzt9nAbUaVi0ulf/Pe2PHjxlMYI00zvBydyAaYRrLWsNg4jK8GDU+KHSb1Z/fl/+R6muXLe3fs5hnyfkv
cav+u23BPfF9LaAYpckd7x3ZU7mVSbF6YKYP3TvLsFB4uuLB+CXRPxbgPhB6H55mkRGnFKYNSZH/5sb3T35TYgCfrJ07//+cyPEt3GabSvafU7z+1XW08+WwZC2n2KXr3/HtfpuspVRQ0XUSpirxDF1BTnGfYjYvjPIfPGuK8ghg6I86rp+gYKA1aMd4IjqiYltA8qqP5NJYqkQfqUW+EZCGJp3MQnuD+1A/tY6VkIS2lFbQIWhqdRQsgnwvdi4Aa9QGd7E3DU1IP+TLwdZCeXjup9zA0CzBCf9waZtAg+/7paKWoLQEvsA7y2Az7yjHnx8ebhxEa0NYYH71RruZi4BxoEon3RdhmNXdvE01GkhkFhTnGwZFINzZL9+wtZpGppKUlxpENlJBg7aa95YS9NW6fAHI4bN2zt1ljEzbLFCmNHCCnw/6f7bouuy1mZTnd3uVK+N+2d4F69Ejsnn1iQmzBNjmuNMJLlZKTnd2zdyTGrDC4MzZ1SgZ5Pe7u2bucsQknyHEFRx5QekZS9+yT3LEJO+S40igDlJmV0j375B6xCTvluIKjLJCmebtn70mOTRjTSI1x8nXrz07tnr03JfbEwD4txlE2KDeY0T37dIyTTnLmmTGOgqC8PK179lkZsQVj5v4YR+Iw0LdvoHv2fp80FJPPiXEyCRQUBLtnn+OXhmLTesY4JCoc4Ab36p59zxxpKGaeF+NoMGjYsN7ds8/rGVuwRkitksPBhai0pKB79v1g1Q9lLtHAGIcXN1FFxdDu2Q8uiE04T44rOKoATZ48snv2I4aASDq9OMbRZNCMc8u7Z19yZmzCODeNiXF0LmjO7Iru2Y8plYaE5Y6LcfJFa9hCqaA0w0OUqgZFXOsfgT4WZXSe/rFoFyX/FModcSLaSJvYPNpEW2k7Owqrx6mT2uk5RGcZ3UmX0620Gm+K6ZBci3fPJLxpy+hWlqq34+RyD96499Ae6E6jK2kLpTCv/gmtpFXKy7BahQyTDRdNwBvtenaOvgynqwPqb2kIzpoX0SLWpFfpN+i36PfTA9SpPKcfR0ZMw1Jm0x79c8Nr+lsIjxn0e7qDDrBbLE/gzT8N54NO5Y94961XalSmz9WPYQZ+ugRzUPFu3cO28RB6r6ePmJddrpSil/v0iL4TWhlUg3foetrCBrHR3G+YoY/V9+AM1oeWo9c7qJU24+6gv9AbzG44qt+vH0V66Y3TwEr440W2TYkevypaJBwNL/WiQrQspKfpWdrHAuyvfKHBbigwhA2X6vuRE/vTFMz2IVh+yL7lV+JeqTyjjtJLkLlX0c3C2/Q3epel4Ww6nk3lvfhCfpeyBO+03vLEMAfv/GvpdvT+DguxzdzO9yr3qY+qPxgzowf1ROxIkP6A75y/sgSsVGON7DfsFfYeL+Uz+R/4IeVW9WH1JVMdVn0eTjPX06P0LXOzoWwiO5c1sMvZanYzu4PtYfvYx7yYV/IL+RGlQVms/EUtwT1ZbVR/a7jGsNb4cbQqujP69+i3eoF+DU1EPFyF2f+e7sLKOmkvvY77AB1iBmZjibg15mdT2GW4r2TXs3vZRvYwa8co+9gh9gn7kn3NfuA40HEjT+d+no07wJfwS/it/E6+F/c+/hn/XvEo2UpIGaSMUKqVhZjVauUm3E8o76pp6l5Vh58LDOsMGwwbDY8athuOGu2m35jJvPvH+47nHX8nStE10XXR1mi7/i5ydCpiKoN8eE4n4nxVhzPmcpxRH0Ccv8zs8F0ay2Mj2TnwzEx2AVvMlsOTV7P17AE598fYU/DSq+wI5pyALwcx5758EC/h43Gfx+v5Yn4Tv4W381f4McWk2BSHkqzkKaOVGqVeWaqsUNYpEWW38rZySPlG+RG3rlpVn5qtBtWQOlqdqS5T71I/Uj8yzDC8YPjAaDUuMF5j7DB+YRpsGmmaYJpoqjHdaNps2m+uRXTuoCfoz6emAnZQuUopV56gG/gANZW/yF9EPM+kOcpYjkjlG9kafgV
r5zmG5cbhfDgbR0fVIHz9DN/Av+HDlbGsgk2mC3j/WG/GJPURkd/UHXRYfQprexE9Lzfa2ZX8iNFOrfhcKcSYf1P6qSHlBXpDOcBM6j30pmplHnaYP6RMQBT8RR1pqCK/cic9pixmV9ATHGnR+oP5OsTxOPYI8kIlK2DfKfhi5+MQRUOU9+i3dCF/jQ7jOV5Dt7E56ly6gQawy+kjehBPRS/DRcY8YzJ7ns9Tm3kP1k5cfRirK2Q5TDEk0dWsRllvPMJfxyl8r2qld5Q/YfZ7+WPKWPWoYRJrwBNwBV1Di/WraIWhSn2JzSWFTaVc9SCy2+VKgepHuRJZZQZy2mY83VuQB4qVsZB4ETnnIC6mIEOsx3078oSKCJqHZ3wastiL1G6s5B0015DIkHXwBfRCdBJN1x+kO/S5dJF+C/VBPlitX44eN9IHdCNtZKuil+G8n4Un5x12jmEU32sYpffhzfx1PpmvO31/4e1c5qVPcT+Gykh8Jzerr+J1U6Rfp/8D0X0GMuwdNIvOpvexys8xwhhlGw2IjuMt+ihlEdZ7gCbqD+k+ZqUGfT6+8Z+iB0wGqjOFsMcR9hLWexnV80n6UqU+Og9+uBFeCMNby5B/rg2XTqksDheNPHPE8GGFQ4cMGjigoH+//L59eofyep3RM5ibE8j2a76szIz0tFSvJyU5qYfb5XQkJthtVovZZDSoCmfUuzwwqlaLBGsjajAwZkwfUQ/UQVB3iqA2okE06nSdiFYr1bTTNcPQPP/fNMMxzfAJTebURtCIPr218oAW2VMW0DrY9IlV4K8vC1RrkcOSHyv5mySfAN7vh4FW7m0o0yKsViuPjLq4obm8tgzdtdispYHSemuf3tRitYG1gYt4AotamGckkwz3lA9rwZd+AiYVSQuUlUdSA2ViBhElt7xuTmTCxKrysnS/v7pP7wgrnR2YFaFAScQRkipUKoeJGEsjJjmMNk+shtZqLb23NV/X4aRZtSH7nMCcuhlVEaWuWozhCmHcsojn0ve9J6vo3F1atfrU1nSludw7TxPV5ubVWuTuiVWntvoFVlejD9jy3FG1zaMw9HVwYsVkDaPxVdVVEbYKQ2piJWJVsfXVB8qFpPYCLWIJlAQami+oxdakNUdo0gp/a1pauFM/SGnlWnNlVcAfKUoPVNeVZbQkUfOkFW2pYS319JY+vVucrphjWxIdccaecCpTf6JNclJdcBWTTniWiRkFzkJARLTZGmZSFcCahgqoH0rNs4dCDVc1g1VkDnZkXsRSWtvsHCbkwj5iyHUGtOavCREQOPzZ6ZK6uMSY6/yaBCvi5ESoob2Lj4RCkbw8ESKmUuwp5jhS1gf16X1xBw8EFjk1FHAfTYBv66qH5cP9fr/Y4LUdYZqFSqRpYlWsrtGs9FYK54eqI7xWtGzrakmeIlqaulpOmNcGEMnt8n+SkiPm4Il/DmdKj/KGYRGW8h+a62PtFZMDFROnV2nlzbVx31ZUnlaLtQ890RbnIj1Kq5R0Hud4uiJbEZQzTiiLSpU9oubin1EG9ZwOkxlRKSVMGxVx1o6JYbXV7++mUYd+VFjJ4qRZfJqRYaHT68NPq582PXuzggnjVVlROb252XpaG0ItNuBZ8QIRT5VVfq00QlPwZObiX4e+baig6vRIGC4rFQqIv5goXj1NMT3OV+MS0dmn9ygkuubmUQFtVHNtc12H3jQroDkDzZ18O9/evKi8titwOvQta9Mjo66rhq8a2DA8FJxKWgJszcSWMFszeXpVJz6ztTWVVa2c8dLakuqWHLRVdWpEYSnlQiqEoqKJClUwLLKVm6V+emeYqEm2qlIg67M7GEmZuUvGaHYHj8mcXTIOmRqThaVMXCLHlFZWnRo98pGsxmcdLO7CAXs6vlUclMlSw27Nx0rNGZlZmL3LmeUgs6dDj7bb7SVTwHzZbrNJ5ptwtj0BXFCzMF84IYFPsWhOJ9DqcAC9UtKhfxXuabcbp1jSfJnORGHqtCbAzGk
X/Tk1pmEV0o7QZbswlYywBnMMw0rm23bRC5jvwrAHV5M1fIY35PwmJK+aEceBI+LVmsMAKhpxfISg/v1KV4QHK+kms9FsMKtm1ZjqTfNyo81qtyZYFWNySlJKjxTFmK54/MydCPCaM/wsxeryUyjEQqE8XFexmgEuf4EnxZPiTk7iiTyQ6y8YPGTw4EEDgz2DAf9d7PtHp19ZvbRx3KU371kVbWGFNz/Qv3zsbfPHbYruNmxJzjxnVnTvzoei0YfrCjYN7l/+yYMffpuXhbXfi/OL+E60UXs42WjIMptNJlJU4XyrJctGZhOCNJzvdA80VSpna1YtgVvTElQLF/6zSI9arGIjLN325bF2i+WERDr1aJdT7cPP9YbGOb8Kdbl1rPTrOOc3NWO/ev+kT92F+SOcwrVwSrI/TveqOT/epYR+/IdytWHLpmjRn6IJmzCj+xFd2WKFzN5JCVhMSo/kgaqSZbHebd1n5VYD5zYzdqYryMxdQWYWQWYRazNrJpOxQ/9crgnMl2GbWJTRKVaE+sFwns1mnGJkYj3GmqYElsBt0kM26SGb9JAt5iHhTyum8J9cFbZJX5lFr6dHX0rcUVoC0xImJNQmLEpQh1d7QzWLu2LxZDTWxCTwlEA4r2hEYU2+DEkWGuCC70AB4P3b+bHt248bDVuOP8inHxvF246PxUzXITby4DkD/SZsZxw+M5BZU5nawR8K+01ckUtU5BIVuUSl20HwzU8eKOPPPVAf1sT2XOy02Ot12/lLhi3H/rVJ5I3Z+keGtw378X2dzlLCFWkOluRMSkr3pKerqlNNsnls6erDns2JzyQqHo83nWuZYdf4HuM94bQqQ5VlmnOKa2aP6Z6Z3qlp09LXeu7gztQsRXFn2SzJXbGQ3BULySIW5BKTg5qJ4aH4SsrBfNwuVmsS4SEWCeaoXCSYT9vFBkplsUaT2NkisW5TWlMmy3RI/zmk/xyyc0dQuM8sBGQXAjLKEDBKZ6VmzJ5x4vGoGSuyzLiuTe4SUNHhosPY35rFVFNTs7iHk/wFqkgZaiA7hw9x0oACcg3kwUA2zWZr2OAX2KhH26Obt+6Nbtn4HMt89U2WvuKTm1+Mvsp3sQXsj9ujD7x1IHr3E8+x6U9Hv43uZQNZehuz/S76Afx/D56sTYgPL2XzYWG/25bI3IMzpvvONy/wqRanWLJZokliDiJfeiZBOEQw9i7G1sW4O/RDbe60gSiPtmX3HOgS9cyeA53x0hEv0f5aW2Yw1g59Z7wU7eGzwOQmnp1xtjbZNiNjQcYSy/LEFY5V1jWO2xIednQ4Pk78yOFMtNs1lyPJ5XK4HHaLO53701KsRnzLJNgNXoslxZOWmuURM46/b7aFk8VWeDzkzxbZkbxehyPRnNUVKlldoZJ1Im1kBRPvNIoAiaeN2FMg88VAmTmMwi3GGi1nUU5TjpKT7ZUB4ZUB4ZUB4f1fPlDxVGH8aaqIP1eB4Rt/LqfGAyf1fW/8beXEHc+todBxVArz3Z5C5vIUrk7sGzJc4dwpwip06kWiP5zQwlZz2FHocA5zuYdBVM0WQ9hJifo74bTUQld2aqEblBjOKHRmJ4F8oOTCeCfV4sWWgg9JowlvN0+PgNKX45UWcEEs328B/z28eefuS3e9PPaMKefoX22fctG0Pv6Kd9k9q9aNu+2+aD/DlvHPrbjzlczcnHHLootZ/6uvG2ozHV+mDBiyYnTDNeKvsalEpotFpPLLO8mhR4XTSqZw6e7EWFbCw9ehH483KCca5LMpjhG9BKcaY7lOIJfbpMqDhCKR2+Nm4nGXZp92MV/JERDv+9ttkBjA4J0BrhcFXb3cQW8hDXYVugd7z6LRrrPco71VNM1V5Z7mdd5uvt3BW4zi/BQe4GRpqaHkgYaB9jJDmb0iudJQaT83eY5hjv3C5KWGpfbLkh2GZLtCzG0mswMnNcRpkbhc2Moa5nIXFqaHsxTVYOBGE156VizXkpDocNjxGe9OTvF4vch
0I9oM5NVEaXe7RBmenmy2aIQ3pcYoiTHyGszmrGRvUnKy1223WLKS3WDdLrvDoTldSU6ny22xm73JBofLaSeOKRkUr9PhsFjMZo45ed1ul4vMaR5PmrPYwiaSRnZgMihMBjZxs6YxxlJTO9jalljw1qSljj2e5j1+PC31uHdceX3Zhyci1hm/RbBifa4uKixcPbZvaPUVO1f39f60QOCtTnTu3AkYsbOLOxVYRcQxuSLiwoG11W314pEbOrQawlwI8yDsJBKnd6qI2CBJhKTNHjaEoVSN52RJTezsdvrlZwN6pHgGD0HhRtFjAAuwYE+jibG7opc9eyAnbaiVeT59aXwgo8+HO6IXPRl9oafJkxR93rDlx6Lbfv/PHOWd42nRz/61tl157NgoteY6rX70D/fhCN1JlcoZbUGvb99TSi86COJKr9ZQpq9T6alktg73hTuUQJs7ucBR3EcR+SRfogZcCHoctFURv8WYqYgzoRO4EtQEehy0FbQPZCQCilYNtBC0AXRQtCiZSkar5nMW91RSYZuKt4ND8dARkA5SyAfMB40HzQTdCNoAMko9IVkIWgnaCjoqW8KKp/WWAZi7p3WtLNoumF8gq3Wx6owaWW2bVh0rx06MlWVnxdSGxdT6D4yJ+5bEyp69Y6U7t6BJlNaEgm3FKUoKFpmCiS8CMr6THAh0H92tJFMExBVjXBJW3G05wYINWxWVmMIVRnPIp29TWGuCq6DYynV+hNzk45/zw7EWfrgt0VWwofhsfogeB20FKfwQ7nf5u7SSHxQ+BxaBNoC2gvaCjoCM/CDuA7jf4e+Qg79N+aAi0EzQBtBW0BGQib8NdPK3xDe+RMEXgbj47Qtqb2JZbwId/A1wb/A3MLWXW4cUFnRKJpQfZ3y5ccaTHmfcKQUd/KXW73shooLYaUTUk0o2jaQBSnZrbn9fh+JtHTHP18Hfa9NCvruL+/H9FAFxzGQ/Rt5PGmgCqBa0CGQE9wq4V6gJdBPoblAEhCgDOkEa3wXaDXqF+oHCoAkgM9/XimE6+N7WYImvOIW/yJ8lDzy+hz8ny938GVm+wP8my+dRZqHcxZ9pzfJRsQ3tBBsnSifKfLQb+F/bctw+vdjFt8J3PmA+qAg0HjQTdCPIyLfy7NY5Pjc6eZJ2mQmarfSJLB+ke80UvsAXDpYiADUBwWFnggNs0DYEeTi47g5UBQRvuAWcgODV14ETELz0KnACgvMvBicgOOcCcAKC02eCExAcXwkO0MHv+nNOT9+Q8RcyrdjBL4GXLoGXLoGXLiGVXyJu+l4Vc/tDa14ePLY+HOqV52vawpqeYk2TWNO9rKmeNV3Jmq5iTSNY03msKcSaMlhTFmsKs6Yn2VC4oomF20+rFoa9rGkXa9rEmhpZU5A15bKmHNaksSHhDu5vPWuALMpl0VYsHjqUZ45E9nFwPzzqR8z7kRO2AveCdFkLQ0nLjimnZokyuy2vKFbvO6xgYfEYvgOGO7ANO+gASMUG7UAY7UAnO9CBA1gEmgnaBjoC0kFGaGdj4jdKdADzQUWgmaCVoCMgo5zOERCnhfEpPi4nlh+f9HhR4ztwiz9i+bk/nOnMcIacY5QbM5gji43P0rP4EEoRv4lwu8yuDpaw+duE775NIEuxhd/Ab6RMbMRN8fLG1u8zfR3s9tbgk77iZHYbZamIOlZIQZaLcig1yvogyjCLciBl8EdRFrRmTIWZozXY27eFJQqrzb7vM973fZLRwcF+nPGk71WtQ2Wtvn9A8uhm3/6Ma33P53eYIXkq2MFQbNGkamfGUN+mXVL1KjSsb/VdKYrNvisyRvsuzJAN9bGG8xpRCzt8k4LTfWPQX1nGLF+4EX1u9hVlnOcbEdMaJGw2+/phCqEYm4fJ9sqQgwayZIdThnSwhnBv0zpTlWm8abCpwNTb5Df5TJmmdFOS2W12mhPNdrPVbDYbzaqZ4xiTJM7LIXGKSzLKH2gaVfkDO8k7Ocmf1Mmf3XFm5nQ2RXo
oFbxicgle1ttmU8UsLfLN5EAHs06cHjEESljEXUEVlSWRoaGKDpM+KTIkVBExTTi3qoWxG6ohjfA1HYwqqzqYLkSr0sX/rXcSY65V16eL8oxV11dXkzfl4iJvkXukq3BU2c9AbRxPeft7T+MzI+sqJldFHsmsjhQIRs+sroj8Tvzneyf7kh0tL+tkX4iiuqpTGcm+LJ8k5MrIsurqig42VeqRxr6AHiLmC6lnxotZ6JFmzorprY/p5cIeejmigJ7FQrlSL9dikXoqE3otjTnlZS05OVLHo1Gj1Gn0aKfq7MqFTm6u1Elpol1SZ1dKk9CJjJQqGRlQycqQKiyNMqRKBkuTKlNPquTHVa49oXKtHElhJ3UyYjoJB7t0Eg5C59/PVb941ZfgFNY2vHr2DPGHi9pAeT2oNrL24gZvpGmWprXMro7/RSNYO2t2gyjr6iPVgfqyyOxAmdYyfMbPNM8QzcMDZS00o7yyqmVGuL6sdXh4eHmgrqy6bfSEgUNOG+vaE2MNnPAznU0QnQ0UY40e8jPNQ0TzaDHWEDHWEDHW6PBoORbJGJ9Q1WKmkmp8cMmyjdusiNfadH91SYpz0UgZvMP93ivTt+C0spFsoeqIPVASSQCJpj7FfYpFE54p0ZQo/joVb/JeOdyfvoVtjDc5IXYFSii0dFnjMvKWzyuL/WvEBdHSZcLhMQw1/tKFtvJIuK6scSnh5JyHk3MRTs4tJhOktWJJkWFdMputHF/dMWFfCIcJoaKcUBSyEUJmscQVf7r/y+Kl/Bxt4k+2sXAWW0qN1Uokq6KSIxVUxv8MsAVnKfF6aKzGAhtZiDV29RGfduxrVxRizV20dFmci/tiabyMWcKkscslJy7hrNAJjy1Fh+JSSGHiMigK4/IL6zPbNvrOrBNSoB4lC1n042Qlq/zNjA1oJzswgRKAiRId+OI+Tk584B4nF/BHHENdwB7kBiZRD2Ay8AdKoSSgh5KBXuAxfCF7wKdRKvh0SgNmSMykdGAWZejf4+grUKNMoB8H2+8pmzRgAPgd5ZAfmEvZwCDwW+pJAeAZlAPEdy4wT2KIeurfUG86A9hHYl/KA+ZTCNiP+gD7A7+mAuoLHED5wIHUT/+KBkkcTP2BQ2gAcCgN1P9FhRKH0SDgcIkjaDDwTBoCHElDgUVUqH+JL8xhwGIaDiyhEcBS4BdURmcCy2kkcBQV6UdpNIWBY6gYeBaVAM+WWEGlwHOoDDiWRulHaJzE8TQaOIHGACfSWfrnNEniZDobWEkV+mGaQmOBUyVOo3HAKhqvf0bVNAE4HXiYzqWJ4GfQZGANVQLPkziTpuj/pFqaCqyjacBZwE9pNlUD59B0YD2dCzyfZuif0FyJDVQDnEfn6R/TBVQL/kKJ86kOuIBmQX4RzQYulLiI5ugf0WKqBy6hucBGiUupQf+QltE84MV0AfAS4Ae0nC4ErqAFwEvpIuBlEi+nhcAraBHwSlqsv08rJTZRI/AqWgr8DS3TxW9BLgZeLXEVXaIfomtoOXA1rQCuoUuB19Jl+rvUTJcD19IVkFwHfJeupyuBN9BK4I10FfAm4EG6mX4DvIV+C/wdXa0foFsl/p5WAdfRauBttAattwMP0B10LXA9Nevv0B9oLfBOug74R4l30Q3ADXQj8G66CXgP8G26l24G3ke3AO+n3wEfoFv1t+hB+r3+Jj1E64Ab6TbgwxIfoduBj9IdwD/RH4CbJD5GdwIfpz8CI3QXsAX4BrXSBmAb3Q1sp3v11+kJuk9/jTZL/DPdD+ygB4Cd9CBwi8QnaSPwKXpYf5X+Qo8An5a4lR4FbqM/Af9Km4Db6THgDnpcf4V2UgT4N2rR/0HPSHyWWoHPUZu+n56nduAuegL4Am0G7qY/A/dQB/BF6gTulbiPtgD/Tk8BX6K/6C/Ty8CXaD89DfwHbQW+Qtv0v9OrEl+j7cDXaQfwDdoJfFPiW/Q34Nv0DPAdelbfRwckHqTn9b30Lu0CHqIXgO9JfJ9
2Az+gPcAP6UXgR7RPf5E+lvgJ/R34Kb2k76F/0svAzyQepv3Az+kVfTcdoVeBRyV+Qa8Bv6TXgf+iN4BfSfya3tJfoG/obeC39A7wO+Au+p4OAI/RQeAP9C7wR4nH6T39eYrS+0CdPgD+N6f/38/pX/zKc/o/u53TP/mFnP7JT3L6x7+Q0z/6SU7/sBs5/f0TOX3JaTn9vV/I6e/JnP7eT3L6IZnTD52S0w/JnH5I5vRDp+T0d3+S0w/KnH5Q5vSDv8Kc/vr/o5y+/785/b85/VeX03/t5/Rfb07/pXP6f3P6f3P6z+f05379Of1/ABquEH0KZW5kc3RyZWFtCmVuZG9iago5IDAgb2JqCjw8L1R5cGUgL0ZvbnREZXNjcmlwdG9yCi9Gb250TmFtZSAvQXJpYWxNVAovRmxhZ3MgNAovQXNjZW50IDkwNS4yNzM0NAovRGVzY2VudCAtMjExLjkxNDA2Ci9TdGVtViA0NS44OTg0MzgKL0NhcEhlaWdodCA3MTUuODIwMzEKL0l0YWxpY0FuZ2xlIDAKL0ZvbnRCQm94IFstNjY0LjU1MDc4IC0zMjQuNzA3MDMgMjAwMCAxMDA1Ljg1OTM4XQovRm9udEZpbGUyIDggMCBSPj4KZW5kb2JqCjEwIDAgb2JqCjw8L1R5cGUgL0ZvbnQKL0ZvbnREZXNjcmlwdG9yIDkgMCBSCi9CYXNlRm9udCAvQXJpYWxNVAovU3VidHlwZSAvQ0lERm9udFR5cGUyCi9DSURUb0dJRE1hcCAvSWRlbnRpdHkKL0NJRFN5c3RlbUluZm8gPDwvUmVnaXN0cnkgKEFkb2JlKQovT3JkZXJpbmcgKElkZW50aXR5KQovU3VwcGxlbWVudCAwPj4KL1cgWzAgWzc1MF0gMzkgWzcyMi4xNjc5NyA2NjYuOTkyMTkgMCAwIDcyMi4xNjc5NyAwIDAgMCA1NTYuMTUyMzQgMCAwIDc3Ny44MzIwMyAwIDAgNzIyLjE2Nzk3XSA1OCBbOTQzLjg0NzY2XV0KL0RXIDA+PgplbmRvYmoKMTEgMCBvYmoKPDwvRmlsdGVyIC9GbGF0ZURlY29kZQovTGVuZ3RoIDI2NT4+IHN0cmVhbQp4nF2RTWuEMBCG7/kVc9welmi6snsQYdcieOgHtf0Bmow2UJMQ48F/33xsLXQggYd538nMhNbtU6ukA/pmNe/QwSiVsLjo1XKEASepSM5ASO7uFG8+94ZQb+62xeHcqlGTsgSg7z67OLvB4Sr0gA+EvlqBVqoJDp9157lbjfnGGZWDjFQVCBx9pefevPQzAo22Yyt8Xrrt6D1/io/NILDIeeqGa4GL6TnaXk1IysxHBWXjoyKoxL98kVzDyL96G9Ts5tVZdrpUkZpEdaRHlqhJVEQqWKJronN85V4v/62+N8POUcYuqdLprk750F5Y4z47X631Y8ddx3nDpFLh/h1Gm+AK5wck/4erCmVuZHN0cmVhbQplbmRvYmoKNCAwIG9iago8PC9UeXBlIC9Gb250Ci9TdWJ0eXBlIC9UeXBlMAovQmFzZUZvbnQgL0FyaWFsTVQKL0VuY29kaW5nIC9JZGVudGl0eS1ICi9EZXNjZW5kYW50Rm9udHMgWzEwIDAgUl0KL1RvVW5pY29kZSAxMSAwIFI+PgplbmRvYmoKeHJlZgowIDEyCjAwMDAwMDAwMDAgNjU1MzUgZiAKMDAwMDAwMDAxNSAwMDAwMCBuIAowMDAwMDAwNDUwIDAwMDAwIG4gCjAwMDAwMDAxMDcgMDAwMDAgbiAKMDAwMDAxMDExMCAwMDAwMCBuIAowMDAwMDAwMTQ0IDAwMDAwIG4gCjAwMDAwMDA2NTggMDAwMDAgbiAKMDAwMDAwMDcxMyAwMDAwMCBuIAowMDAwMDAwNzYwIDAwMDAwIG4gCjAwMDAwMDkyMzkgMDAwMDAgbiAKMDAwMDAwOTQ2NiAwMDAwMCBuIAowMDAwMDA
5Nzc0IDAwMDAwIG4gCnRyYWlsZXIKPDwvU2l6ZSAxMgovUm9vdCA3IDAgUgovSW5mbyAxIDAgUj4+CnN0YXJ0eHJlZgoxMDI0MgolJUVPRg==", + }, + "name": "file", + "label": None, + "show_label": True, + "style": {}, + "elem_id": None, + "interactive": None, + "visible": True, +} +BASE64_MICROPHONE = { + "name": "/var/folders/t1/j7cmtcgd0mx43jh9nj_r9mmw0000gn/T/audiovb4gqjpc.wav", + "data": "data:audio/wav;base64,GkXfo59ChoEBQveBAULygQRC84EIQoKEd2VibUKHgQRChYECGFOAZwH/////////FUmpZpkq17GDD0JATYCGQ2hyb21lV0GGQ2hyb21lFlSua7+uvdeBAXPFh1upJLeC6SCDgQKGhkFfT1BVU2Oik09wdXNIZWFkAQEAAIC7AAAAAADhjbWERzuAAJ+BAWJkgSAfQ7Z1Af/////////ngQCjQdyBAACA+4PMpH/n1EPs4MPlDak5Fzh3pT23QOozrpMIemMucj6646WZTq/qWAjImUB4j/aEtJ08SjAyqjqFq+2zZ5BmqSKaDZJtE8pZRnh7pd/ez05WinXc/FkOyULyhFtAKY7v5MAAAAAAAAAAAAAAAAAAAKADzFGuPnjkNLV2iu/mGqmEkZOFkTDa9XGu/V+C8YKNhgXB0voRMsMX5rHf2WcKpFvpWqoiFsq5scEBbG0cNIjdGoU+Z3Scu5r9OMpyp0ETCKhFwi+D/g/ukqguM4i4rX7bjr3/IZCXAiOQ40t44c3thLsE9d7N/U6uePnhBMMh4hOCoQEL9bQcJHJpEL8EJsRPhIMhSZI9/aBmdmUAb56PS8k6qVyW57IMTYbCOJ9d0wjC1rwuLwUWeX6YCLfpX3T2QXdSsjThYFKwgsJm4i33Piwe/liwLaUeKfa4XjbkP5zsHX4C78gpFRf77q3Pg5bvCukbN416f+vQiBunXlcZ/RSdUXg9phF/TftxJ8NOk+sxY19g0TVRy2UMBV9uVxW9nFrQCLYxhOK50MLQDvEtRzEFGD8rvpc3cF7vKRFT9ObTh9vUx5pZ5Z0V7xngOWsz/DlxbzMBcRYOHeczi0aYAQZQqgnfAaNBO4EAPID7g2RpPsoN+j5Q9aclGv5D1s9CdoTT+mmvJ26cRh1bNNaI2isW9knZ3H+8hpVtpGfeLsG+6aQ8kkThDo84BlIX26mGsWfAaZlM0eJlPWlqxudzu2IFQXqLOzk819lC3X3zG4c+9EVLhEDepIDmRnjv6VCyjH6HmsJKeuZo/Lu0k/7RQww2vY/i9azLH5f0ew0XFNrHruB8MgFpwwzVxQttXpwhHTAl0B1zujsaaNX1+6vYSsv4DBORFKQiPYb69Nc+Sd46gbcItW11c6DcmdD0Jj8XOcNtjXKMryjRWdmEiYrAXVUTkZLsnIZJxpH3Dzs0V658BEWYfgNsrlVi2/8KaqOFpPXMyoZ4M1sWKtk13pRAk7xeQS0OqLKSkn8rzX1pPkKuONL0/vn8KKi9auAWZBE8+0u0JKNBe4EAeID7g3V/ImgFnHyflxJxgesfQU/hEw2cW/PTo6SRV89BxbmEbiiUEffK49yo3jalZn31EOX+GrVfONzQDcwz6+39msxgr7yRHJBXlCrGuDPhZn1yEg0nbQoC6cuaiocVGYivipU4B/cVG+SM/1JUZ1dOSMSi7IzUx/cIPxL9L329mCSn+d7e055zJthQaWzB35p0XbeLEmEDGf2xbm4Bt3eg0ROZMmKHC4tsVohbvjurVAhm31fk6KysYxJ3txAuMC6A6mpQMFmo9ADCLqwFP1rPFcR5+DNMCG+m4dvKSmF71lXvKi6kIVEP2U3KIsekd0GHY6W
4QpybjUBlcIvjEwFMJcGpoeBpVZ5O+HEIONYCOJ8y4Z68uThakypsLKgqkPa4bvnATI6Hj9WLkg43nnLxWXFIobaw6mrpqR7+JuwtY4eL37PP1hTYv6ypROfDtonK6CtKUZbae3Atqgk8dsiYy6f7UXPmovQcgK2j6VCK+k24/T2rrkqjQYOBALSA+wM746KTKovZvocJZAogLOpprNkJuKrxFmMsLcdV/47iA8juYNVF2DA+W4KiFx6t7bflq2DELtamBLn4H/5wvv3LBStiTBgg1fgcO+p1iWuEg1RqSvLOVJE6oVZUrRxqtEWRewOCVDMNand7Exdc4rsjl+d8TMeMdalskYwKiDRTPxjIu7jr4sFGehIAL5bod1tiEOq7YyPdSliPnxRT4VbrICMoy80t5E6+2H01d2eReYzsRtuP4uqAudLvM4zL/2pWwH2wC1QGEEIiKkDFAbAYPFmwqKxMEzm+uXr5xnbMB69B6pyqsp+yq9cWoT96Oh+XMMu6DmtVN1Q/qzkUET8zrXOb0sJ817V2Zaj+0QVAlmhjFVGE1q72JcXE2+PN/KFXMooVaS2rXraiJYiXCsc9FcmRo/JVjf51LVKtyaxGp3syZghPwnyiNhGpbXCA0yDn+qsx7zItsxbmjL3eG6mwI0jkdxMhy55MpbCpqBESfIiZiw2IHXxQtI6KPaqjQYOBAO+A+wMaWBXecBWrz98jGAjM2LAvlKxUqbKiDsOE97P6bQkKXREtptUPWrrOVJzSgiTue5uAOfnKc3lHkixhmZiIC6M+hmmWc0NxW8OekQfhpmE+juG6BoUE3FTKuRPrmGytfqahopLAtWxxvNDgX4TaoqylsdgXpMaS3ZinkA1UvsYQPxc56FIj4lFeF3f8ea39rtA1JzZka1asIQJl8wor2zoRzCW6+jX6anhLKEBjCuPy7TwZ1ACCpU1tw68DvFN0nqNpAsb0QdYOst2y8CjU2QshwUQIPLMhws+PipOdCawbkX/VltWSl3DGmJGx88lRf1AsGvGmykCkfuqXkTbVuUPeuFwHYNKmkcUs99U8aYYZyiOv8BjJzo3vQmYNAIrb+EcjUIlSE3ecrAVZv2oBGY04Ntf9oFYPUGWLRvvd8UswScVxAFToUISFozdpgrfZwWtYikqw8sTkxZRI/YDXY2Epk2O8w9XMVYdxI4FojNsKQXpYFnolP5vyPdmN17OjQYOBASuA+wNPuyhaEA457BBCiSmcmDmjbP5UFKpdUvdWLRXtxNZpxos2I1ZK+f0xmwbZx4Oq5hBWsNBBwdsd9zReiOwY/nl/gUEUynWmfNvDMLRfwb47JQWL+kqgDLRN5WPJTXTpyXvVRoI4amc7Wjbesai+EG8PhcpuABFMJjNbcU+aGMJuT7rfb/PeAuapGwtQefLOeJG7ELIHjqe/Ehizufd2dhXL91M3E5syhmGzdrP5Qox/DKeQxt2f5QXr+S+YhpoHbzMI6hCSPBePzb3hdbbZ9kbabpnWBWZreAsINDgcwV4Yjx87NpZ6ThjvpqFL7GniPcqU3CAx5e35PXRwR1DgkSIqi4GEihWD4cKFWzDrxDAf4hSvvGLFBiVgu24oaJLNgqmBTunmozN3leeRDGK5RBq8CQ/1a/jPQxpKJqwP0HvfM62cutODtObEl6hOg9+MXSb5h9JYvABoo3oZa+WYiWCBl2z7WnAFN7fJsjteYtuvDUON/O9DW0v2YzNdTNOjQYOBAWeA+wNQbXIGz7NpKk31vLNIFhBPBHrdfP7xiV0usIfr3zJa4B+VymnG3ytGfixcorNxhKpbCs2H1cLrWjhSM9wcVdcRSWfQ1T12E5KV58cWTkfTEF9bW7H8cXhlcSvvgkjrWaQfIx0eA74JVzqFXx6BXdd9sZXRRmaOX8Ad+mz0fu5mIlwJW9KSk/M3g5W4ZGo/LslHWpPLfQo+7OPokpNR4WNCUdralfz7TBza7XMaWCGeYnUYFLf1POjtxvzdMgM
MxZ2pDcW76i4k6roOCGKWtjAC1wAE52lir7r6YUeqQbT8QMDFeIWHSOlSVZnmrgMalzfW5HB8UEDMnWsXNYYMGSJKffDXXH2rBb0GXJg8mYatPspytQUu5xyQOWJddWkgonoTU4mFWUSohuUcW2cpKk1rpdJpNKod0fpH5RyoZnAZZYXzeQeLA7sJ4LwUZ6OGwj4ZhZlvWxJRkIQtGJX1jgsyKAVToAwrYr5lI4pTHnj4bA/yiDkCjD/q1jeZsuujQYOBAaOA+wM/NZhxY3E0H687M+siqrTCmh9MPREIILn/hrUqspKTCRXlMIJ/PZeUsDAcyrRgWHR7RM5ah/IvKdCsJKLU5Q1nMGESjH90HaNBSHf4V/Fs+PVHqZdKbA9tt2lZJ3TINcySP0sw+99rHZckGW51Re684SKYmIZm5+1vxKGrdGImUXBz0zG9xkr0kutLvq6RhzvvYhj9orQvovv3/mvt6yQAXZ+Pv2lgC8iQXN0Y4/HS98zUWoPOcZklWrCt6dUB7JI/P0xNsTExjF8/wnDe255TT2uR5NcFJI4clXPaDVcUApXdBa0H1NzIb07WHX2nHpi05c+PYN+c65UVf8FnND8gDjByXsYy7Iqz8aSmIKULKM6iPi8GbhqkamKHLsTXIhnFih30L8HIAjhnleY7FiOxrIukUt3K0fXHWVVpyXklL9J5u/nuRV3epKbtTncXQu1MRf2S8vkYW2GGgX5xCBwoOwkESScUf9xWDwYqVz+VR+Gs7DKQWWnarIsg5XqjQYOBAd+A+wNAhhKTNez6jmto2HjPkkOFDiSfmZnHDYtbOb1vTXN8Rbs9VbTdLYwHbw14DpEljDRsQCBpvaAQQix+iBWCixroQ/dJkTS/2KnYzFOhlKaIQEffrhpW44LQM0pTabthfXVQit1fGsCsdr7zPOR2mrlb5ccvVbHcriovtP6lGzuWPOBqqQnuXKLkyPs6Y0Qa+9gAujc+jripZJKFOYlA9MSwgliyTOJbTkfI2wlqqTKKoU1bcZDQpp5Ye2Er6GaZo7ZGVn1gvz9lDOSMCMyr4Oq5y6Xktzw3CGM6UGX7SXMAOtbt2RjPaHtuXrAq+0qoI4+WbXIiscQqeItSTn4ikSLFJqymv4xvxcJQRfJB06y7ZpT3tx5A98/F/qDo7unBCn7veNDgQGQLcmimpW9SX5oQraYkndGHvNlFDSDOAsKOK3IJD7uekmUcr/WYVqArzNBwTrZ5tFQuZ/8JQo4xwX5Az3aG1fSMtG0l8i7jlER7MCybZGkjIq6MT2A0NbGjQYOBAhuA+wNETRKRPUv0GQWKTjosJhcXb995F1P2wm2q2Ol6kvyTxdCbaQL8LszJISOeAUYQhoOfGPW02CnVbW91T8PMnnj7qEIxbO8RdQhqJsTb1Ssio7Tu3Pshvnendh68/uAuB6sJywkAtWlsQAhOspjcSb8w+WY7JoHJUml9yJ2IUDIvQIEBQ8u1w500gsyRVh5cwpTVtng7jW12zb+AUriGGLmO3ut72EuK3uYtFNSInpI63kW1+poJ3e9H0Ejy4CDRd/76/mtifMI0l3OuTR/a+IIoN5r89222HTkSKLS587VDvvfyoKoj7IAlgQsjY4OqQYKsOFH+dVjs/8KBkYU2/T+Ruv60j7K6zURZ1027AwH5Mzcaf0Vv22hzoIuVhUb0UwHP029fsJQnlqH8hWzzaPcBmPreenDXWsne0HLoKsB7OX7r4ns/IHscX+MVNWHCYRumXwrH6y4ZS+nSgZyG9iPoEfgEWEloE9Y8SZdWh/9OgMteGZqteivn2g4rPSejQYOBAleA+wNQHGwm4JvyZW23Pqd3njZ31QMzuGuZLxXuiRWl8JR0b3PfiNBBxRxv00xBhQS+VrpOCeMRA/YdnecYyI+6knzQTazpTHGxU6S3pAO6elaxcBswmYTl+hSlcg4QXIgYEwCDEdWTpSRi6ALl3vXyvsu5Km9/iZnXGlSv0jM0ho8UIuw
zq5dXAUJPlrXg/hAYuZZc9wOkCNhpXdovJHXFnDzAs+fVYYBmghzjGCPXItR2w255cEWmnLy+U0Sg9IOLRGr5lvmyEXKaNXLKIWUdrF/rK91OSPrQay0Djis1tK2xdIZLTvDVlr8K3IEKoqJrBUzAGHZo7h7dm80vlTnBGU/21CfjaMi9JStWk4Ua7Q7b5qp6+5W2Bj9fpDZ2Ub1gZOoTn/rEUVameFjy6hbIdRt2U+XvAu8wKERAVzNRgaa2DhOL0UKzZg7HHI5IZSMIkExBT2ybFrDRog6lJsT1hAtcTtx5Psz+IF8UpjRi++WgvIr8iO2KhCA3AzvtpqajQYOBApOA+wOaoKR8kBqXC+u69TtLyz+S8831alq62o+0U1GEKnfJa9AtlUNR1nZJpw8DlA3QkaXVGagRdmsEKP/TKwyWdvkOMZbKPpr1Z/4mNfnjtkU7jWvs1q3kXzrnFlRFyjlmdoMt1A0TfhRxQA12VFHu2JJE2grGlKWSYKvcluKbJHE1JNagDp/qB+9lJvxMJA2kkDBQQfIR0mtpU1DTHEK9yE7fyHvCwOiyiveTCshlsSJ7WvlhHQx2Rtn7qjJlpb2SyOaNFJ297nllufOLenMk1kB4blxu4DnSg/g0zdmSGtwR8RVk9sQEiONuVJZubqKtiX/jpEG1CaUde6+FzNM/fyIvDhbjFIjqxPdDYLWZNl3l5gCD1E54kKXeUMe7eDToWhk+0dGI/4XDIp3pI6a0SbsWxNk09UulucwiCZaPl0MenCskrh26NQ+Zd6LJsW6JfD79si1E/GKhB3LX0YcYvY/2HD/WcOcZ9JzNdwG3KMf1zX0OxXBrORAg7J7pQnCjQYOBAs+A+wNAf/DMyDZlK6zqR28ylj2JXQzg9e4kK5/vL75PNSiMO1tdchiii4UVc6iTjfYJXXjqG73LpuuQ7T1HtWj4u6hVQNg6SZts3qxlTpIjuWXdVMaeKeYc7x/DGPG0S4DVmC9U+z9IF2icsvHHxF0BoV53aC2jdlTBcV+vw8xeafm7QOrKTmL7nglxbza94cJtcaD5gs5Vfrwgoij71pTNiyZ9iDt0I3oLbNCAZeqMtSbp+PFnK3Tv+zhx2JKtM7PrUyHTW3qo5LREn+G+7EBUKmCFmtStGBP72FBROCzkZH0TTv1U5Gqz4JnPj5YBfx+jkQx5jznc3p1ldEZz6ysYl1GXN1fI4CsGygqvFzrLQAn5x8o9WrgtaYQxEOAWTHK1Vp9x1+X9EgA7RZV+9yalHCaKjBjLx7iea7pju/muJ27jlKygb7W2t0rj2xXlVJxxU2KXSn8atgwt4aGQBJMEavLgDP1Z+Bmvlo57X9DnTLbxP82j2chb6T/TcafjRu+jQYOBAwuA+wM9aYQ8fhQxZZsS2xCi8dq5DrCTihUpnjchwR5VGlVhZqycrEkjLIsJe6nCBs7RdeOKxphz4n1KS5FtcYRUJeR7sQ2sDW/NC3G1h3qyRMIj9A38wP6FnnVZvHIy0jGzgUeh9X6s/6tlMscE3fN1+hZaeCq6jD147dSsrOS+YW+NPUEjw5WJ33BOp73DuqlxpXeegP/gPFS52aZ5hZ7uz/WQkJ4qAgmEUb/J0iVdRXzO8/0XK00qq+Rp+cWLZLbDuoYHqK/xg8aMq3ZN1iQ97/TLkpe6RX0BI0ddUoiMTiHtlbcSf1KUAwQfGsUgRTJNIxdelIDzHS17DbyG5COPSRpKYWC8f4zsxoS8jHzdZE/kKUA0KIUP8AYc3qrfrZiLPdkbmqKn4ixlJEdnbPTF6IVxmCoeR1sKjJGjwWrUxCIrKDiN8K3viGPgsbsHytbfffzf6EEeUYxkFROPx1SFMgODw5GsnOcMozYrg97DD80a+DMr//dEjV6jO+IujEijQYOBA0eA+wNAdJcvOohN2QTQF4F/DpVelPfdj8pYus9E31VBsUUGHNaHbhjBqeo+/D2MI6AQ1NOHUteCsYt7dF7NIWx5JqH/uL7whC2
fOSjBwHT5oPw8ZKfXIUwGbk5J1RZrdbVVfaYwJViuAeqXs/WdUg/2PD4gT29h9Q5fpq+vhFI1BwPaPxEZFtEv1t/+K7fNrmhNBYG/30bsBKVHbw5AmrSim6Dhkd/pGE5RG4D8ecsUvGlB+rnqACTHzs7uxY0gdTYq2r4WH2P7DeXqVcMKMWBUG76hI6IGKW7vBXNbF43Ap2vlJEmZURzB35jl5QkSbE1owbFLDHOoyDb+YDt08HeSKkRFgxHjKVAbSWeGMQhFDP5v9kszHwCCUnKRkpK/CR2vIqna2IBO0QsE49PTjmFBQ2plpBuprVOOXymr3jVsqy7902HVHr7rUfE28Nz3/ikOuBtgGy2KBk/Yxa2ksK2rePpck18oI8h2uYpt0wnaurMeOB0X+hHVZE1O/kSIBvSjQYOBA4OA+wM/WaFrl20Ui032X9rmUgKVbM5pprwG4iPi6fxUJg3gmiRJDgFgneXHJplCRLCx+F8qZa885m/GPHCqot6MZN8BJDNdnquocrEBezXh0haYqkjxDx085K1fWwVJCkMyCRPMx+KUg4A1XgF3OqjgWx+VHHj66mq2F0k9otZ0UC5qRC2Qq51JhgRMAJqQLtU8cOb08hG+QX/Yter2qSR+lLoLAikjQ+QQUOO0hCJuXA/gP6SXXH1dqLNhkASFpvbKsosmT/QLiiRZidbJ/6Ct6lYyOG5eP0lYRjrP6mK6mnOaKuFw5tLG9qxKw6IoeEeY7WI+A8mr94Wrn8kl9bKTsjy+zA+C0SBq6aUzeZQn5OtzH5O7h4u9MPOnAylvIEjR+bdWoQlK7FJOuA77nR8NHrb5bEbKMDfR/aKB++XizUvI182P7M6AwP8Uhyi+Hajd2qmBzGeN/iays/z3hP3ZPd7z45r0LIXw7H9zZ0UcxkJgXPTFbg7FjGACIo3mtsKjQYOBA7+A+wNA8LZSgbInqd+Lz420l4sGZEKHpdRbYp5yK2MIkNvrRkZ6tJKIJIQnGKRoTHslyhhrKmuGqWAwT3PuL33CT3S2kjXU5JzvN/lJTK7clyJc1PunTG2+ipQtq73aW/YNNA4LvWPLL1FB62kooYZrrLNsFnF1k65HLRtPwqZP0fhKIj3V/eQ31fhNcF9ZqINrTnZy7pm620I5gqXWUykwFgJUJh5Lp5G0I3pJu9tsmTVBLs3ArDnvTc+aiWyVCQSwZwaMsMNpQMg9opB9aP9+mfa+fqM3uDqr2+a8c4m99ZCLLaqWlFZUi1uSy5bGgywJVbwAhYd7W5FU+7WVp5YLMEB0tP7qYg84kzz2tF3th7hQ5gMqJEMuSp3yOWiiqCFvC6k+ydaa0DNsJ3NnpdUn+hmow9CBLHREnz98RUQtm2UeiINGE6Yo7990Fil/jT14QAroZVgwYsATUGbFO0CktdifhlL4HmJKE/nVhVimji6WtLzevBmN2WDj32CfEaqjQYOBA/uA+wM/GMfyC+5QrcCefekrpbSeOkVMpX4wlR5dXuW2BEgceI0M/cUHWYLuDuS5B3FLerjXFoaPf/sm0zQJ543mF51/Hrl5b87/60bg9id822D8lhIt1Xi6ZhPJE0DiBP3Y0vFsvHhMvTyBfHHJaC8tRcZqj2yXkBcDZ8VsPW736sGiUZeUhHEj02jU4v1ZaVFhzsDcl2pd5EjcP3Gtw6hpwDongj6HAPvsbR0XV4zeCHSsKBEDhRL1Ct74hF/cfl8KP35Q46qnDsp6mNXnIHuKUYNHOcp/Tqhn1WjN35J/Hi0BnArFIMZutnohF3k+aEIu2H4i9XLPx6CBcNK0KRZe70A6SU22uucHcuWPCbjzRajRFJmmPHCO4/uKLzrClZu0xMnxu9OBiCcjIl7Cu125NthcX4nbGZeEcq2vS2lzKHQxUbhhtyf/OQs+ZLOoFaUw1lR3HHSA6Ksgh4WrpUElDOjkJjU5+eLzmcFj446vVazES2L0oKevLHuWc9ILB96jQYOBBDeA+wMiSCbZHA9+efZ
LryV1YiRqC/a6fq5QJR0NtSmHEk23ZblnXEWRZndLO0FAoLYJJx/5uQF8Zbf80zCs6bBiEZEXIv4c++XW2WnGLPgk2ytQ0RhQLLG5bL+864LO9eqJjsrk30BRZcNKndmbmiZxvZ1jjlZXEPREpMcPiqVrw2rpPznmy0Z1c3rfheURzpc5xstDcbb5y4cDG1K1orgPVrd/gg56lfV2IlmforFNn03Snjh8rblmoe9OHNDYE7xbMD9kNnnPApaWhnNrTM21Zz+1btJrWpRze4LamvAcibKO5TyDM6JPpGiQM4MUknWmYfeSx3nQMUT0r83s2zx6vURBIHZt6Fbp/te7HKM49nraW0aUIPUgavx8rpp+mbLxaYT9wjQizg8rQnWXLoDGbZotsMY1eVAS7gNEgDYSWs9JRQtkI+7W/+urYll0vwWHcQfQDyhid6AHNi4+ahH08V3uMzcHEuJOgT4eX5Lmjfi/KtCbSD7/Yz9UyAGy5rqjQYmBBHOA+4N/fz8RB8z3JXt7cuc6lRNqlHwU83zLL7Xg/9SG23471qkWDLgZ9j5chWZ0Lk5AdsjXtJhZ18zDp/js8JGokUvYIf69qM5M5+C525eMDYu5IgeAYCxCg6o8/IV011VGGJip/km+ABdL0p8Ge/fABmFhBgLrhhuRMMj2JVxhZ6oxwp88RM0y6EahYfTbxpnQf7fm6PW64BmszQN0fDzSvP+qwjiM4Qz61aPDWuIMJsGH+C/iZp0f0q4/7+/JilvwNZ2hpSmAvVLVe8V8vSRNuMTEws1kEIKl/wPtQiRuypz4NmT0ocfy8Pc3KagMHi6fhs5sfNutK0p2Xlh/XBtrepKchKMVB+7w81CHjEXgvLuII/bol3Aqnz3+3YtrTCusCOgIBQhbcso6mrWVO1XTW/3tAkd2qmj4mRdXNetG5bU32/eKUaIndB8188ePl5ospYfdaKwtcdWS0a4srFYd5ga5Ex6XHRhW8AdjJZf5cIt2WGrjctCgFYdKiiztpCd4FrbuwkCjQb+BBK+A+4ONlvDO7lzRoItQ5Rg5I0uSMCY9+7rEDz+fgSqZXUvkt6FaVBSh1X17J8+EBvOmrk+/5wfBNcFxDSohPxn9Ap/5NFum46nKJQbOSuy1dh1vURHujEVzQpj5GcKjuH1BeYin+Q8sTgbeV2+yCyTpjuoqRXOxqxBO5ZHD8mxhfVLkhTmfPWYNLH/w4ByBheCoO+snEBTcf2XuInUprKuDY/Br8axWAirmjcW8cqNzQiQMNoCn3seijnjZi6di6N4Ra31Sx24iGh3hka3ZQKZiaMlXsl29ZdqdTWOnTVaP0WUw4hIVO2h5X7k8ybRxU8+dufq95zxWG7330cUpzbQ+myMs3A4o7Bpr3VRBStmZifDde0oyO/u5mS9pepYkIYpc4rjmyZFGQurduRx6fBwyno4wlKbwH/bR4sGAkXiO0UuY9+aFDWunnnSt15n2THINrfVRZ00PDnGCVPnI5c2CGjqHkChNjHykoTybFQVPW0Xp/v9onsS7JmLMzi19aJwy0fbV8t9POxiaDujYvbyhM0PNx7qsFCtHExyZoxlu/KflZM+xeC0vgzssGfM/Yrx52WKFaXujfC0pCkGjQcSBBOyA+4OUle7V8d+del1dQ+AfX2kTEsQtBgsCeGfBhtAlF0j/UBtzzLI1WK3/zwNyN5smy5jewmtpVfEAxcauiYrCQN9nykXo2ZJ80bCRrDn6oDTmkZ88bU5DBEo0783DMLe3nOgm9VwPGVQAe4ufmY2GJWseAvhwS7oRYj4CluSmVi4o1JnzZD0qDNceFZGjjJUqVH3YLMAbmkLq/qU75EMUTjs1F7gbbOu4Q7i3ALoB/g5ojh4dxomJd4Tf3Jz1WYZ7nH1nVc5y19IipVH3XZygYOZ5Ortgxc3SiU07F2Kgzzb8vFDKbEX6EtUC+aalLmlJYfQiD7HZLfvbzZQ+buL3BeWy35dNXd7KODnKRhWjn9Fam2T
dJJ17nLEV6msWYIlBfn8moLSbXQJxb6kKRe7Un7Z1wcvXx5TajXNp8kZCz+vlCAFuj2jeMuWVL6i/HsJH++CPopyAotLZ1hHyq3HoDYnQjI9aF2BktGJxs/M1W3xh3v3IvVvkgBlLyQaAZrokJ5AnJv8x+1u2dqTKo46Dbofs9SevpdiZtdmvLNmmhApg5sQXEpKCXTeOZeKsvFQGvmgOuWNaOPv5t793FQUKRqNBjIEFKID7g4WA6tXja5c1OytvkgwT63HOr7vajJ94r+F8YUrRSv+aZo1AVbFlO3iEHp81P7NR6Xg0lVwicDhBoCPfvjwDhw4gNtqXuSYdrg/oFdHcUYktX+9LgDRVV8EhQKkWfrq/O+uuXFYYdeTtJaM3LD3WK3jHFet5NE12aUw9aauVDaRTcS+Y5jp6Su7UXnZ3o8Zy9yWLTG+dka2kwzaKrnbkDYe8n0xz5v7JWUrNLhFo9AkKUuC6w+Vx8wIRmm73LsFpyJkuEFwF9STc0V1h8cjmm2mDp6oqEiWQdqXArDZpFPVJ41VMylcOI+lPY7MeYe7SrbRINClq8tVfVhEo5kjUKCs8CBj5B6RI7sLKPRapa5j5veLdkNwR0QXfE4HH9AXTHdlswAl9r0MRTjTVdkOhzF6SAwJ2+FxP3pTY2TKolhSchOx5Auxt/WQ+oG4CuqU9TLt7lfoDDOD7Qt9rOKJirGWN9SE1no5Z48pct7kHTm0u4jlFPFkgwemf8eR5v6gbdAOu3mWWS6NBh4EFZID7g4B/7pxmFStND6gEidN5ZQO6VnEyNe+JFaAH9OZNYG6G/52RcFcLpBVqElRkSDKvUE8kTeGCnkTSl7cvBvodt6nHq/Z80Ok1lcP5p/qUo2HQEufDbWLo+LjNxKv08PI3N/JvWb0fYwmVFZCZvvd4c8mT6Rifz7woVyMpd7mNZme/hkrqruPvni/vgDaTGwlFPtYOEUZLiE/Sfqg4DCC+2cpx+2zdriBe9/0zWviQ8FevnH1ycYoM+NMPo5D8DG26OHooDKgGI1k22yF4DPhFQJ7X7Nr0P1DwoaUUSMWFGrHbF//TRWHTdHw5zw6fYlDesCoef1JgoWt8Q7XcVAOoqzhP7f0lqs+1Eg7aGssS4Rbx7w0VCor0qeRYdNb/M6CG1qVVLRfl/VXUkaHXLovqie+Is9hwrxWDpk16ZY3irt2SBBnHlxBuLVNoed5GJhi88dnpEiOMYWyY+teE6q9EcoOjHvzDC7+Nff/zAx68fYvMiMm9egcm89RSNVSJgJjtGFejQYaBBaCA+4N/gOqup+c0l9fkaHVxu/bZ+V4EBVrSlZP6echgc7ERYfs2KaGXAjO7pzArdj52MNF29CJc9D52E5NNprs/U4ZkHRj6Mw3yua8PHZ3RNcjkU0hkW4g4GDRt/eInB2ZX1eq1j13algzi5iv79bHvxIlXQBeoKfFSkMyqFjl1k0tX5knuN0hx/Ifa3GbPMeBqFN4evxb03+8y3IWTTzSt39Tme/jnPopL/5JS38XHwq/5nUcYGai+yaN/rKN+2ANO9255DJzitbREO5XAFs5qzUgHpPvgm63cY6q33lsAtTYpZIdgMC6fZEIXLaogDZKFJ/uA6kt+/a/Uj6lCq7NHrXIWT+rpJocJmUo3n/uAb+pLHqE3wykjfdmT5yHCmWxNQzxKH2LCV8eKPwNtzHLjSJauWAplJTagql4Fk9BQ0p/JSztBM5Cnw9t+FONDNfMSFB7r+3Tacdv6PpNcZHb/wYjQXqONmAbxuy67c6TvVsf+XwRjMVnvDJ+rdpYVMyb/+lWjQYeBBdyA+4OAf+q18mBLjgEq+6p75VGkt7LcuPBEXVAptuRMteyUWfaMTVzp5gvO/uQDiW/0KrswPdgpSYdFqlbkRUgamIkWY4LN2vK0gnX7D5I0IMnItVatxQkgQL1zNVHSrgDlxgOlPp8ma+rsS74DHFH49bYl6p/WIiUR6ad4KRINx+8yK3pV9K6
D7TFsE5ILROUEzhngW0JlnLPTeZb+4f+vyNDOF6C+ZYbZKoEx/64KfIw3sWOp5I2Oz9WDFXI+YGy04jYKeO3JoG8i2m/T88XYkffO1lImX6HrJsrK83CQI1n6XjSq7+HWzh6Kjt4OoDJ24K7pYwVNFjdEy8e5eCMKXD1qXfScOjcxpfOf1BHx8m1LsLU5wv27Y6Aj2wXA6oUHw+JiGjK6c911SE5He2R5leC7xbuEKEGymS+cfl4tgSHFcZY7PiUmNCe9IFRllH6oBfbuJkZZuBwVnnF0bDHRnXo62tE/Ku2Zqm5vPyWufbG/sUzDpD1XMbMCqo+m/4hpXKpfo0GGgQYYgPuDf4D0cktJTWSrDV0YJdBji87/cwaSvfyIUOdhgfGLZ87v4Po2+/doUWJxY/bm2CvNy27DI4UEJAisyalvwEe2ukEW93K71UO1zE2oQVGJn5qtKPmbkkyZnGaxXFyAlBovRm5XBtKKtvB0qjsCdvSJxnuZ2bfxSn/tV/6r5q40ywpf61i8jvrhANMtlq0Hr8JuHIOYAtzBohcHBOiQkNCpf2dgQG9HU10r3fKW+0EE+d2cV0FanuyZxQallDTh6pT69msMYw18gKKVDgugkS+a7bCShuuid7+toWdmqzZVuIcckm3LR2R1Lz017UAJt4UiROqoGVA9FyRVjYqtcVmX2mD0pJWU0gdBUxFsQTqES5GjYhR7eBeiV3wBAOCcq2kFZKbEzZ6tT6l3LTqPnuYF8hHHAl1CfTa2K/qJ9VUxUn6ilu3m0X0ywwXAPK+vnin8XAJPSOT5meY7gV/GtWhmJGgvGSMbBhqkv1oX7ydMeKXAUDBwFTZjB3Xvf6v+A2pko0GHgQZUgPuDgH/rS/Vxw0tdFvURGYP4KsErhCNQikuyU0g2dkhrDJglQKu8diGnIdoDX1cvV4L2my1ZJmEzZrcfSnYxjL6X5wHVNz6eH5n5YROxvAeI3gFhoPlgvVQOvygg3w22N6nAb7JQ0j0RkqyNQdC2nmrrSpasXfU9a8pmOqu1dVMYe7I6YerCO1O5OXTNsH8cyGdXe1d2lS7CwE60SfXywn/3stK3iBYvxWVIHA6SpVSk9HEDl2dleuFUl5DyJ0/au5KxJhTPQC/J3xY4Sw1hV43WNgHnlESTmGFndt7nvyVgET7/GPOX5mi9nlgm5BbQzT4iF9h9vUx9NpOL+s+rhE3I2GDqr2iofoW6TGp65hLCyR4TApzN/u8U+KV5oDqaqBpF1QA8Ur1Ye4HhggDSx9eOpnYM5Atm4VXePmVWrJv2VE1SZ94gUc1G19d6Ue124vHTtXyN2+oTDlhnTtH24T0tsLrG2rXejAhtQ5N62KLkR5KZEy6ViOrWeEZ9b6KbLLV4ZaNBhoEGkID7g3+A6ve9WfYcwIlWJZW4E7iKlf9pCNn+DPO/7SAae/M9XNAqfSF/6snUxltZk+HNTtetVuRfOCToIanz2tlXMbdj3nZg5dFpiEM5RrmEvIA3rmD54jGx8/wFg14bA2s3yh42Rb7EcZ0e0lI4JMBux8qFuPwaa69WGh/3jImklD1YZex9DN33dJCXZXcIw6n+JuI4DSwEkv1AiF5UvSLOXIhzMjHS3YCjPaOA0GF1RehpvvQGANBAe2fUxx/7fAZZy9jz585yVGWvf4s7DBiC4qIgFoKeWbjXiW6AGhLHEzIhQIkAsAWDIhJIam774GqBRt7PHI+mKzflVLSvhZ/Ugdhk7e7BViVbwFZzFKzFhsTScIKaVns6W8fTk95AbTOnULaUzR6kkI8O+fYYNroT7uk/+ZpvgRvLxSfbjutx7O/HGgOxTI0SlDfswJrnVznVCgtctyTHszpO1MTNDv55M9h0kGxIZjMlc+iCBuIXVL6wBkneBNRKi1UX4q8XFsIEYqNBh4EGzID7g4B/6rRpBLBG9xLgn5bP3hsSXip1jPm5u8P13LqMxJaUHl1Sqirn4Xupyj/O3bTncsVl8m/SwZNt94x
8bwYSyzVxvPgyZPSi20HBDZ6gGKY8/7WpzkiXMe7/hrBVyrovOQaRYyQMOJUopfqwsr9C8YhzXDOUjNxyinVA0QJ/0LduiGMnWuKhmLApUPTwnqDAXg6ZD5ZtcMNSP2McBVNJ0CYhyNJa4BC5PgsfvxdcFbER55xGhkZ+gApruGcYNqKC7wWXOgpAeoltiu8oeL8WXWIov/Nd4Vkg1iOot3mG//4HcPgXwH5xNv3ZpT02X8v+CXQj9+34GzoRPbmZXSayJMMxCmB1m6pFb86GfyKaRwYoIycUCAEiSKUHqub9ijFO3ftQFad4iS3rCphPg4+l7k8XNqnXw9xaDVU9YAEBZUW0e5t54pdEeEBAbnXQabXrAAi4HZanhUfw9096oKO/3aSHbpAueZmD5IeGKoklFfZi71/vIl4SoJ/y4T/Kzw5824ejQX+BBwiA+4N/fe0gE9oDzk6pPWticJk/R5FTjvon2CHvSq3CR5SL4kJIDSwtYpPjzDCvNAmAdGGkKYRtYWF8l7GuIkcy7/S0cMqhKUrLVeiJm7AGVgLa8jK9JS79Jre8BDOsT5df93WB3s29/R5NRFRG+N8K0Hw9EOnxxEIeNUAREgLfMB1JVkvuss/QXJZ2+ZMBgO5Q6HxwWAIZacuc5DXGjtpb8dOS5Awx1445WwtgHItCQF4qh/TpOdZE8UbNv8MFdWk+Y9r5vDQ+IXHseOal2HpNoFBvw6XedhtL2ojBLgKS68Ov3P3tZvgbF9cSQu0sNVZwkitC1LCtI1P9z9oU9IyGTusuYXf8N4MdIq+wRyggQ250wd3FE6BDZJsAdEZCgw2WdT62Rki6nA5jo/tycZ5WF4z5dGpQKQv7RSaVmtCqaA1eZJbaMqJOq479Yr99l3oHjSpbQ+lErD1RdWkZeJUJyLNX5ZAdvkfRDUZxOP+MulWhINSlPwTneAsGUaNBYIEHRID7g3Vx7BWyG7DcH6AYG7Q459BzUJ2ZG4HEC4noLN2b1d5/SBZsKGcLn0/8pIv7OdNKYDz7rPLVgq1obd9qn40C6vNxSeNK80rbaqqZ1rud9KfBx/noFM0UBImUapGmCyOEIpUeDm4DJF3PrftupEjQaESe4h/CC3ZSFRTudVfq+V+BKHSr1z6BW6xyxzVX5uD52AJ5+lCN/mh+NN5Mf1X3AfNOsOqw5RfMXpFW4nzP6fAgbEoFWeJbDr+6xxa4IIq4i96/wWCB1oaZlYxU3VP4OMU/SjAsjvqeflmF3SlBALxFuntKp/Ta90HsXFzRNorF/tthsDuCKOgHqPC1IzgqZxMcwxwGXZHCQSvhFsvS9h85ruvmHOL5AewDFKxegrQPQ55I8SWF/pSkMTv4U1dKv13IkZSpizZ5aOLpJ8WbQp1MFvWWNxHO0cXbH283pHZLsKyQCrOw7cxcVD2jQWWBB4CA+4NzdexUs9YVJOPdr8Rja1mRLN/WQYwMCcarET9xjsD/nSC477CKcUfkhZG5xodOb+Rsz6K4TARyiY31BOaCZZxhOCDn0KCMLu9TndVasMHgetYNcaHDP6cSQ0p2eS4OHDogdAVG67D6WK0CA9T2ipy9veZRJFAbKiRvy2k4+7oHNGUGzu40/azOsKd87nfqN/J99yv+GYxQ2WZQeJ+vRbtFIYPa0YIwuwk7mEMug3eOjfqHTFNA51r5tMy5sZlxDMWmeh7x07wJcDdt3cTMolRLXmBb3jTG+t1UgiJ5Y7HWaFqHaJfiojj/46zs5FhU0GLeXe6TIN8HEJ5L8JYFwqHs+JI7L4UUUXzYaRQn+IkVZXTQat0VLqdbQJT/z7//WivKxtpsHxNKi6uKN/rZ9wRFXiCnsN/iVj1zXPcQfj3enO5sNtAVstcoJNRhQ5LAqHNmLxbafdwE8Z25O3O2A6ijQWiBB7yA+4NzdgmdFxOo3G3yaW+oSJaQ6Dmx75E2R3kCpEjOhRiybt20XRU4E35JeuQxMmYBYQwauGBwePUB5KvqAQjx4Ia
EdHNY9ntqsNciJa8cR0t4qZOgv9ppks30G56LIHtqvca87lShlaslIOFCn74I+VFBltnyFhAc9h5xoGdSDNqPSsgX2cCV/gCnGETS97oR1MDkYMiS3kzhXFhBofu6tE7Y7dCjgQe5gvuQ4c66Dpgpj11g1b84bvRGl5Qn+NAHcCctoY/WFNiixSDrh77ek210LoX2+RDjCQISDkKlI09ORqE/s3qAPE4rNn6hFoU3rUYbim6+DkTxhk9kNdiEYt/ia/z1IgzfNR4YwiHT1BI6AGg/VhGeuCW5+qEZbrakbBf+csfr4ZEhiR7L6nIO8jDKK/uzw39ygd5LVHY5I0wzJmwcDHrI8RPKrx6AW2Puz6EaFlCy3Xi9yfojW6Rt5FXs8pujQXSBB/iA+4N4dRZoFsbVzhOkjBoqBmi9lwGLu06T5uOEMvfrj7hkcD/A4IuEAWVrj3T5aL4BlKjn9K0pHYJ/DWz7eEXaNTIdAak1qgXtvK6lZohRIRIXzwHOQIcX2ME0hwl9o4HZm1hap6mhnJg2ZxNY2NlpF5prPPFUiTeyA+WXDRzuKEIF70ENSN3aMLaJYGoZfcZtzD71iqOgn+VWxiiPYzySy4SNBjDChpoa0cNISkirOUiLdodWw1+DB8XfkWCYPgEkFeH39VO/T/6OFJeI2z9ewOX/5Q68V9dFN6/kDciiDAduEJf+x6MbbA1BWPoVp1KuNgi6JcxdFZdXDs+974no+cXZibim3E3DrBXjZA9TIKplvB6/0fkZ+MFZEAuHYk65QyldcuW4zYZjHua7dQNRSuaTrVD1vH+xXoQ20kpAo07BwHLQ3F/OraCWG61EjH7kOKkTu38EGV3Tw+J5XtlFXT2C9E8A6eC2k+GvuOmbNrmjQYOBCDSA+wPsQ272UL59VaoycUbwyDZbtbXr4Yu9frjn24RzBqqeUfY8WaYJXmq2NxmjFlau6UlEfyDanjBR7F/OIVyzDHlyKNQ0qFXlnANZAiPHiLvcGdjhNqMdqlMwCaW/Yfs0tEKtNEaSC371RBSjCQuU6sf5jcGYEvfq0ZIdyJJHUIh5H0/sP1PJga482I9ZsLdb8hsPfTqkRgaUdWHcD0pozzmUngr9tQcrP1Eg/wOSI5lNSpXsmgjYXRz3xlnO8k49L82A++pkXPAVQfiIjKA6DIJfxMf08INrYkFCd504AAL+FTqxahYQlIkx9MIGbQdbeKVc5Z+I2iad/tfnkgTLTSAHATiKzQ/+D5d5OCaAdQenjjmeCWpb4L6hbHllxZCKfrvk5OBrn9e+WroJcG7xEn68/8p4F743/rPtrVg5lnkGpjJakyPHqv98t++X/BQlFsMy0NSqoTit+Z623X1Dg3gkhL7a10aF4PV5Gukjy4nGT+N17W4E0kK8kpnoC0yjQYOBCHCA+wPwJ1Okfe73ueLMAJzKSNWnOQsCIPmuiig7CLQ6ZSpB+f+YuUXxMDhyYhaWwO1IdVcA2sJnm78/yTxsZzKwZj6saIuwUM1MjRIjW0+N7QIuzLzgOFi2VlRTwa34kFCOov3K81HwORacZT2RJQ63DEmclWe30gTsXBXO4+CZv8iBAT2qFEn2GAEA6Cqb51X71lHUlj92J35feyOBb/hbt2A51FKVeR7Ob1d7gBfTrLmMG0Fbrm5sFo4abzJ1pk5WmDGTvKnXjQdIIp7B5kZuqYd0tOGH3B/H29OkdskcLFhk479hMlXog01qOTt9cEZXHRNhsY10RNmin4X5teAZofAnLpCuvUQ/7dgLfEm5DrM8Oz2rOZONXnLyuRYvXeVWCblzyy/Wtgdau1gbpE06g5f2jGBdF6P4tYEm2ikrWjiXmqeefsOgtYt0ZY+8sG/SPqhY51rRNvDZbXj2hXh6tb9TnrBZexz9aU0HAvOtfVFtCTAKzDioRNKTY+LOOn+jQYOBCKyA+wPsL2ujrX2dGycZl1Ww6f6T7nujYUAzTbifSe/Kn+G06wk+YFDGfjFAmI+z361
/qQJMdfNxxIzu8KkfS4n9o5MZdr9LQOeNI+N1D4zBddwjN6iHUH6S/Z3pY0iZmdzc9N1j6jGk5BA1Ec3eTpG6Uul7DZMmPk4FY6EtrIXY/5p/wocvXKW7uGY84EFIFdGD2LM9VpBG7/3j3PG9t2HV1LX0yQ+6Ni25jGjltUVUYOqnIiajbWg53H4JToMk4bbDspPIn9ujLSQl9g846gABkdiTUEjiT5rqwUyux7Lmg8HjO7fLuV1Kt/JuC84eI+W+CDOgoFoEomgFj1TAb215gsAdmiYQ0sLmFHJfiZTdITSKl6bQn18RCvlomRAICuHC3zHJr2pfHEO4Flz356M8djkSkBBi/rVUWsprIDnRCWwjU2ZtFXtwATPx2rDlYw+6Dl8ttac+5/q/S3jzn1J7otzTg3rtwLxord/LGPrEPGOqT0r/ZY0ZFHbSoOl8hYKjQYOBCOiA+wPCh0FzRPu/G3YAzhX5NtLN1EPvPI+hP+dVsyYn6TXnmNi5TtUTR43PHxqksEHMXZkxDyxFePIXYwsa8gpobgFzu24Vh53zpz8CZ0q/YdNPIowf1Dnmp1aQaTDFNlUV/7+pXtAjas9nny3M5bGU589I/G+6zLBIT/h0jfMfoW2CwoZE0GyFe9ngEnoEz7t/5GDXwZXVDyRFo8IXSc8ol3cUQZIMALDqCrr8iLLcK8zBOJigXVkbZJDC25D1yLf7VKbGGgsvjqmDHxn/j3g+afDRMA0K1HoRoTIQrOjcv7w3w5zom6BSiRLkYqQhVOZNNl7A6gIpYlWVBPhjoQxZgK1LtGE3JO+4lZMwEM3mFjGMIJEIa1DFESJaQXO7UN/ovdgKDRsTamSHBehOPP8uJsRPze0o7mEEofsrNvkcij+7CexbTbgfiG3C3jmvNi/2orG2E10W4Az67vJ7LX1JKdbIhu7n0R2zRe5p/91P50ODrpONSmQk1Ce4QXKHOP+jQYOBCSSA+wPCprath4LyUHi28GXhCbZVV6+tBOFJGJT/vYikFeGCX5/oQfn6zsLIo7uWLYmoUPwy56qYAUlPNwYEqUJUKwrDHtX4AM4J28IIVzqBMME8SssRiam76gJQrdg6bbvGVTfJztZuFwRl8C8bMnDDngZxcmuvFM021J6oLNLOrnmArJmrlv0oEm1YhcCHWswUI95Q8yag6c8hhfDN9KdX+XC5cMJ6gNw9BCA2BMhOcQ0Y3hxZRt0JYh3DXhYGGNEdyQXaitDnRPIGcSCW3xzIvKHsIz6+m19dmymU5JRrECc6RGH4lMTuY9+dokZGKBWO+inPlPWw5WyEeVHJCdL+/qxTNMns+xwDwKCIAhWNDlNs3TAIQbPr+obRy9aMe2Ry6yfbdKMqWoQfdPRA19BGANvpRdPJgJ08ldz5H/8l5oNgTmsXQIzuCQPiHzqYVWfDznU8p+d34g5n9sA7yQxJr0r+COKCO8R1z0T0nKI+tCqW1KVhLm0ok5jC7HHLavyjQaOBCWCA+4N/fxNE0WW2c8ULBXMiz7ymtXi23KujT3leEQVHb2NHAE3xPHFFrUOfzstt+BYivh5bJ8AVEV8xe2Ck8dyAxy7g8gvy6K6gfvN/3pv2yeyEP1398i4plsfIETHcNqH1mTa4rXMrwX7S4umhBo9+U2Db0clQpg//0w9/o95GVRYEN5TvypwFr8veVbZeQ8+ka4vs4+Sv2Rc/2ZYGYqyp7iDsRv+yOozUhQkl6PAnkpimhWJ2fUsShH1LLTVsanN6rlZ9Re121xNPVi7OIAAgRm9BtZSmu+1WrSH3dJfkVznCDQk4tqywz28639OhRiv98uFo1StKOGTG83WA60a8KCR7PMCP/NYPM3FmSia5pk76wqH5NJ8Z9Y82rqKgI7HFTn/RtLJ/s53vHNrIq43jMWAQFTgv0SArWhGIjyLF+EUWawPg46vYVtgQl28KI45Un4MuAROKMMi38BKhYhBeLGiqyz5uyrnO7p/NRrrTWlgkxB7Xinah6vn
pyOo2YFludtQyVAKx4gOoZj2CIzAftzmOswRkeVMy44hh31MMMaNBeoEJnID7g3l6772nIV7HCcwbA6junN9kxO+xQtxTQO4Y2ABBoaa/cNzE29kFgrT/Q2nwnO9zaw21nT78QnzGqRvwmIGjnR3X+iBp7N11v/UIfFKHZMXkTy/vtGUWeNkj4HIt2wpDxZsTpByWcekKGprrYbEn7ACSypLBEsqBzFFEck/V8j4TbI+35UFf1e4etoEvPNWAwVIsqwpWCua92fr+EjTnhicbShVe3zWW7m+7iFZysMkw+GNmQ8MD4Av3O3npbj/BNQPft1vBgcq41zyNxNxJP+p9h9QIsrrAJAiyFuC9Zpn+QGzXTgotOUiw8Efwmsur+ON4WJphGp4wo5asKL+hRbnk7k+NoyJSP002cTisRWXBtjR/s4DUBFKMkgO11dICOHn8+sEqAS65bIeYSiwP/WZfZOsFGedvHrM1jMYQpb8mV/3xZ5xs+yUa0PcIIdA4pYsn+L0yLoM0C5Ljrcy8RR6t+gdyWAm0R+WN+i4mmE2waWcwyKNBdYEJ2ID7g3l1Pys+XJJ7Qg8stQqvm37FLd1bwX8ZnTiONnavmvtK6p4MMcGgBWGRjbIMVQB7AqZUMDPNC2JXnESUun7S1nxmxMLkjvufqvCTylLJw3l8vWY053lYqxDgumVK0mYn/TCSVbow8bMupVQ69VHADKnzBurIGEylvXe39T+pei1cRPbh9edXg6bx87ktbKUjQlU9PgGs/VnzzAxx1xxVFwF9+s0YizfE3MIKtM2COoC1da4ATzg/xwbHhjAvbA9KLoJSqN4mQmLeCxkaiBUD8Z1roHk13Wlga034x1hCKH38yH37jfB18sg3Z7I6x1yPIlIv7AD5SJUThkDVs9eTeyeCkDD2t6ozY0oKq5vkyg1qO2JQUbWQyz3xCpO+vPu9rNQSVHVg0hPa3wY+pY598P0DgKuVICyja1yU+1VmxcPfNjhIbZgg0JMzK6LWp/+JtvCQUrTIO6XJat29s5eg2o1quXPVKAbrcK4nbZ1XrRSjQYOBChSA+wNAO3IfDg7FDTcOEF31BpIAPbZeUYsOXbcsem19bJnCaGdPhYNZxSTo5JyVpqr7281j6AKDJEVwtWfR8Wk2fuvlDm7PFITJITuEsL5Fo0DFs2UGvErLbT8elzSroZxDX/72PVCxPoXwLlg5MRVqjIwcNGg3aW8iZf5OX1/Ml+3jRDgiOFH7FF8/d0tQi0XNqhkEp6mEx1KcvABMpev69oTqlLXsutUcWN5KWGn/1xD3xkD8X3HHb0kwWLqsx5ltZelFDjxBufUDX7b0gCkSOE+Es9sZkaHIuhYkiTKH3SEwlfGnkkgSteqF9NVY6c7JQTcXKxFDMtrVnSW8RFHs2BpkMSgE+XNJFewyVim7YvEliS6VWQHbn44ZfA88oa3GqD99+S9TMTlz5lHdJMNpv6ICLJTWbin9ygixUIXaWUORSQbRcaHjTNki+Vq86Wty8gjK/TSYCUHMDeWCECjmltx9AE1L3rhX0uwZ8+Hoy1zibxlIQkJqenfgOybh0GWjQZOBClCA+4N/f0RG1ixyAluQZm9K34TaPbenX4ZmegKfFKA1wiNq+USjDk6bEhxznEngwQgmnLzmjAJVKQCSCHhSnBIQjUtdfV9bSgyT04kk7bqLrq7Huqzms7DVZdgt1xNgLZPUpQMQolAJr7AYNi+v1R0fRhemMvi1YJunKYpmNZD4TJ+dTz0WXVga2qBChcK/GUfj7rSZgfArYCMyQoCWBy9X7k5wCUxfHOI+iAbWLurZNjqYw+ls2bvkEdPXc5us4BMHM7OkrXDZ9nLR9O4meBDWmYwek2hKMWv9eFXE4lORK9V4MveU0pZU1vxzKSb3sMyyy2qCHHVJSe3yfRjssT8S5vVSq3l+8L6MAGT/T78p1P0ExYOMNDIBfHt1kNAn1UhBBOAXMFdY88fI85j02ZqZ+kxG6u/
iZSrDp00+WQWkiGbEonSPoDwtMu9IYE7vLvKto+aK+uiNfeTZjwx7EbvaVjArfai6uNIwVUxQkakHP50IX91U/dyd/25dWaHTqqi9FvMh0g4zwhbHcsxiE/kmo0G/gQqMgPuDkZGrBRmesCoC5IEOj8oHaszhHMn8ANzrTfMOsi5sy1o5c3B02eTeADOq3PqYsTCuGy7R/T7BP55sCDOJSKhB7+NTjGYH4YV6TdHWodoNCT1gBFtU9cNsHPsozIM7QJtrVokziMEkBSEbtFtEoGWtuHvS8xj8JZTBLXKRXlaQGsEJZ62ZhyasmpcCXCD3sZV7zCakrJgvXOVmw5jCvpRLMhe9kVNiB3wtVnK/djl4eyyYNS/Be4TsjzSIuQCVrcL2C7vhxTd0E9WxLRI49VhG4eexeKLvwYy4OPhJE+ekfiPwd7aMzPQklyGnfbSGDbyB6ZQgLIKtE/BJ1viQUpSM8daZZ1KnyTsPRDV0Y4lv2Beab2hxbhuwczzQHlAJbE35uYd2oKyj6DohLD63NLBaKMTHf+ITsDzDFr4vJEW9+ccVKI2IW7eGm+LCuinQBMq0p0zAnT4r9oNH8vFOwbh6xPX1vcrDu/qugKXcfZUaJV19b0L1eDkdDncHQpluyTPhR26yh5NbiEtnRFKKIG5PKcK0F+W5M/rKgyNdK3VFJdSy/X3Am13xGxTqghMeKdRKQo4ASlnrZKEXo0G1gQrIgPuDkY2v6CDaq5pOBwokW/A//y3h7A4SyBfMCra1pFqEBzIVFnFRfrt8u15z9fUEIDxXDPArLJazHhCIX2JAONjkUskQgfBNEGtvnfGGVJYedwO31oE9GFKrVAQmJ6Zrp/SgKW9dYL1rNm9D47N9xQUDZGm0d84pUcQm3llo+npX0nTmeOU241KgijOwj6dNu9ilTD0VFpSToSVigLGIW/ZA5w1HfkgJCdO2hpnuo5pGTqqUsuADZDOyDJ5WyFh9eqcuuQjMOfdwWSbf/rhSVv6FhjIsFu+2QgCoHRQIOGGYKe8V9VnHETk1uq59qU+D0VBsTgZLU3NIeKvbtYV+ESemWixZ2VcycoN+w4Nhvo2JHTnlxHrrrf080QpMNkhbmb6Sr+cFr8z8AuQpvDL12Co3wo/d90mFn9pLEkb6iSH7eMFujUPMUsbOWkyr4WZnPl+AhMP2DUiPBwvOPtMymqRBkQJV4/5XRxolfHP5Ug9R7XEeq5LAXGaGKn4N396LRzDCm82hGJ8YU0d1nZHSjgnFNyqojYSCe1JYRWbx0Mi1bTCJ4VUp28UX/em+zuejPNmjQYaBCwSA+4N/frAhu7Se1Vk3EDYBgE6yYrQ9HjQSl0VpBO9+o3x9441pPQGyfSYZSOm2zadll1ivz/yUQPCaaqJBJux2AC74E/FCYDaD5ugqw9OXUQ0OhlziIBHPjL5OyXOk0Uy5qY+BGUphZi9yveQVihe+1IH/lLTgClOzTJNsI2QgPZCpWtZDgD/0ysO87mDVggB93rLElRncKWF/jXr7GkMhBwQCpkaJqJiIS03xUHXBYcm4vQCkGIoWpWUUDlo4hotQs0NRhQFMH8QzSDP+I00aG7gbk8TcoeHoUliyNsMhzmJ6w9WD7c8rdep0YqAQETY2KkOvyX/jUcZiGpFY2r9kaxdDOaj23Yj91+PuCPrgaBDNpbJkueydSa+duPl4fTxx0kNy7q2KDXm7pNVZPLso9JHrdnw4f9GI2Xo8Grykj1Ul9T633z/17Uf1A9LgkVtSVfCquIRUC5Yb1V4O4lmCCrTbIJLQYACW7VOAZSig6aEe8PCAIjTTM508ZCRLyXbq7NejQXqBC0CA+4OAfawq7rc+841xn7poaqevPFj1pLfxOGNchl9ciD+gGyzWxB/xsCID+nTVnIkD7D/GGN/rLQbZM8H/nBIAev7etcTF6lY9DHzLTbNcLhaCarJtahWNcISO0Q6H8KZ9tQMmWrsRuWKH97m+ktIn+W9VmjHOt0z
Tpl9vDEEghmDumuTbPih1BuT2XdpmdSFAkE0aL7c94+DmMh4ty1NCNe5U9XxGmAJE2X6SJ+/8RIwb0q2qi4cXGtErOJcs1iJIyzNY3sUfwwuhRgh5aoILHvb7op6CUSra6naxswSnyrUIf31ixyPM89TdWunmnNxCESOmc0dxr8YezmShtH9vMi4iK3xqp5loO8B3o1AJv1cmQXfmSFZSS6TlO9QNjm0IaTf4nkqKPIuJ7dJz00sBH5h9zUaMOwnWw7dBV7JtHFUtfuMt5ON3rWQDGOOOSNb4lbDRfb+NukSx6pQpe7Jhi1AZgldTrc0KiJN+e++k3oMeJi1TcOKjQWqBC3yA+4N6dDwMxHNO+IQBmpdfP0txPhVzIcgigTgvKU5Z5B1UsRKL9ntO264vpfXb5qIKZxsTL5gRb8cr9SYTeqZALPPTiPyGprMmcMYuymH6kpdtssAZlmkq4Fkqn0MpsUuusampz8fBVqvkLCLFskN9a3CSKYFYtq5xs2tOY4ZbQOZ233nmPWjtAdzO0TB8XKDJp0uSQqZL8Nxi8qDxe/jPJ3BsNvZ45GD/a1d586VN6+tJnhpq2kUi1JGbjYm748Pqn2jbshfpzO8yxTafKk8YdMTvOZUTJDspa9HGPxojq/Kre4L3WNpEFtsvcB9voh213Jgifb5VIHZtq4wfBeIRSdF2sI+CEpWGXpuLHN1oOiprwIrq5LpEHhHGxQIiDYSiIk/gYzGadEuyUOq8o2B1k5oULZE+dho02hlKtK0cCWv5QADlyz/QhXPZkiwqzgzCRJDGj/1y02xNbBb4ZFanAXgtm/l7fg==", +} diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/package.json b/testbed/gradio-app__gradio/client/python/gradio_client/package.json new file mode 100644 index 0000000000000000000000000000000000000000..d6962e4b2f45153ca9c0db8f944a2919fde0c24d --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/package.json @@ -0,0 +1,7 @@ +{ + "name": "gradio_client", + "version": "0.5.2", + "description": "", + "python": "true", + "main_changeset": true +} diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/serializing.py b/testbed/gradio-app__gradio/client/python/gradio_client/serializing.py new file mode 100644 index 0000000000000000000000000000000000000000..04fd3300969075314f106781fb5940621a12da50 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/serializing.py @@ -0,0 +1,582 @@ +from __future__ import annotations + +import json +import os +import secrets +import tempfile +import uuid +from pathlib import Path +from typing import Any + +from gradio_client import media_data, utils +from gradio_client.data_classes import FileData + +with open(Path(__file__).parent / "types.json") 
as f: + serializer_types = json.load(f) + + +class Serializable: + def serialized_info(self): + """ + The typing information for this component as a dictionary whose values are a list of 2 strings: [Python type, language-agnostic description]. + Keys of the dictionary are: raw_input, raw_output, serialized_input, serialized_output + """ + return self.api_info() + + def api_info(self) -> dict[str, list[str]]: + """ + The typing information for this component as a dictionary whose values are a list of 2 strings: [Python type, language-agnostic description]. + Keys of the dictionary are: raw_input, raw_output, serialized_input, serialized_output + """ + raise NotImplementedError() + + def example_inputs(self) -> dict[str, Any]: + """ + The example inputs for this component as a dictionary whose values are example inputs compatible with this component. + Keys of the dictionary are: raw, serialized + """ + raise NotImplementedError() + + # For backwards compatibility + def input_api_info(self) -> tuple[str, str]: + api_info = self.api_info() + types = api_info.get("serialized_input", [api_info["info"]["type"]] * 2) # type: ignore + return (types[0], types[1]) + + # For backwards compatibility + def output_api_info(self) -> tuple[str, str]: + api_info = self.api_info() + types = api_info.get("serialized_output", [api_info["info"]["type"]] * 2) # type: ignore + return (types[0], types[1]) + + def serialize(self, x: Any, load_dir: str | Path = "", allow_links: bool = False): + """ + Convert data from human-readable format to serialized format for a browser. + """ + return x + + def deserialize( + self, + x: Any, + save_dir: str | Path | None = None, + root_url: str | None = None, + hf_token: str | None = None, + ): + """ + Convert data from serialized format for a browser to human-readable format. 
+ """ + return x + + +class SimpleSerializable(Serializable): + """General class that does not perform any serialization or deserialization.""" + + def api_info(self) -> dict[str, bool | dict]: + return { + "info": serializer_types["SimpleSerializable"], + "serialized_info": False, + } + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": None, + "serialized": None, + } + + +class StringSerializable(Serializable): + """Expects a string as input/output but performs no serialization.""" + + def api_info(self) -> dict[str, bool | dict]: + return { + "info": serializer_types["StringSerializable"], + "serialized_info": False, + } + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": "Howdy!", + "serialized": "Howdy!", + } + + +class ListStringSerializable(Serializable): + """Expects a list of strings as input/output but performs no serialization.""" + + def api_info(self) -> dict[str, bool | dict]: + return { + "info": serializer_types["ListStringSerializable"], + "serialized_info": False, + } + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": ["Howdy!", "Merhaba"], + "serialized": ["Howdy!", "Merhaba"], + } + + +class BooleanSerializable(Serializable): + """Expects a boolean as input/output but performs no serialization.""" + + def api_info(self) -> dict[str, bool | dict]: + return { + "info": serializer_types["BooleanSerializable"], + "serialized_info": False, + } + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": True, + "serialized": True, + } + + +class NumberSerializable(Serializable): + """Expects a number (int/float) as input/output but performs no serialization.""" + + def api_info(self) -> dict[str, bool | dict]: + return { + "info": serializer_types["NumberSerializable"], + "serialized_info": False, + } + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": 5, + "serialized": 5, + } + + +class ImgSerializable(Serializable): + """Expects a base64 string as input/output which is 
serialized to a filepath.""" + + def serialized_info(self): + return {"type": "string", "description": "filepath or URL to image"} + + def api_info(self) -> dict[str, bool | dict]: + return {"info": serializer_types["ImgSerializable"], "serialized_info": True} + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": media_data.BASE64_IMAGE, + "serialized": "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png", + } + + def serialize( + self, + x: str | None, + load_dir: str | Path = "", + allow_links: bool = False, + ) -> str | None: + """ + Convert from human-friendly version of a file (string filepath) to a serialized + representation (base64). + Parameters: + x: String path to file to serialize + load_dir: Path to directory containing x + """ + if not x: + return None + if utils.is_http_url_like(x): + return utils.encode_url_to_base64(x) + return utils.encode_file_to_base64(Path(load_dir) / x) + + def deserialize( + self, + x: str | None, + save_dir: str | Path | None = None, + root_url: str | None = None, + hf_token: str | None = None, + ) -> str | None: + """ + Convert from serialized representation of a file (base64) to a human-friendly + version (string filepath). 
Optionally, save the file to the directory specified by save_dir + Parameters: + x: Base64 representation of image to deserialize into a string filepath + save_dir: Path to directory to save the deserialized image to + root_url: Ignored + hf_token: Ignored + """ + if x is None or x == "": + return None + file = utils.decode_base64_to_file(x, dir=save_dir) + return file.name + + +class FileSerializable(Serializable): + """Expects a dict with base64 representation of object as input/output which is serialized to a filepath.""" + + def __init__(self) -> None: + self.stream = None + self.stream_name = None + super().__init__() + + def serialized_info(self): + return self._single_file_serialized_info() + + def _single_file_api_info(self): + return { + "info": serializer_types["SingleFileSerializable"], + "serialized_info": True, + } + + def _single_file_serialized_info(self): + return {"type": "string", "description": "filepath or URL to file"} + + def _multiple_file_serialized_info(self): + return { + "type": "array", + "description": "List of filepath(s) or URL(s) to files", + "items": {"type": "string", "description": "filepath or URL to file"}, + } + + def _multiple_file_api_info(self): + return { + "info": serializer_types["MultipleFileSerializable"], + "serialized_info": True, + } + + def api_info(self) -> dict[str, dict | bool]: + return self._single_file_api_info() + + def example_inputs(self) -> dict[str, Any]: + return self._single_file_example_inputs() + + def _single_file_example_inputs(self) -> dict[str, Any]: + return { + "raw": {"is_file": False, "data": media_data.BASE64_FILE}, + "serialized": "https://github.com/gradio-app/gradio/raw/main/test/test_files/sample_file.pdf", + } + + def _multiple_file_example_inputs(self) -> dict[str, Any]: + return { + "raw": [{"is_file": False, "data": media_data.BASE64_FILE}], + "serialized": [ + "https://github.com/gradio-app/gradio/raw/main/test/test_files/sample_file.pdf" + ], + } + + def _serialize_single( + self, + 
x: str | FileData | None, + load_dir: str | Path = "", + allow_links: bool = False, + ) -> FileData | None: + if x is None or isinstance(x, dict): + return x + if utils.is_http_url_like(x): + filename = x + size = None + else: + filename = str(Path(load_dir) / x) + size = Path(filename).stat().st_size + return { + "name": filename, + "data": None + if allow_links + else utils.encode_url_or_file_to_base64(filename), + "orig_name": Path(filename).name, + "is_file": allow_links, + "size": size, + } + + def _setup_stream(self, url, hf_token): + return utils.download_byte_stream(url, hf_token) + + def _deserialize_single( + self, + x: str | FileData | None, + save_dir: str | None = None, + root_url: str | None = None, + hf_token: str | None = None, + ) -> str | None: + if x is None: + return None + if isinstance(x, str): + file_name = utils.decode_base64_to_file(x, dir=save_dir).name + elif isinstance(x, dict): + if x.get("is_file"): + filepath = x.get("name") + assert filepath is not None, f"The 'name' field is missing in {x}" + if root_url is not None: + file_name = utils.download_tmp_copy_of_file( + root_url + "file=" + filepath, + hf_token=hf_token, + dir=save_dir, + ) + else: + file_name = utils.create_tmp_copy_of_file(filepath, dir=save_dir) + elif x.get("is_stream"): + assert x["name"] and root_url and save_dir + if not self.stream or self.stream_name != x["name"]: + self.stream = self._setup_stream( + root_url + "stream/" + x["name"], hf_token=hf_token + ) + self.stream_name = x["name"] + chunk = next(self.stream) + path = Path(save_dir or tempfile.gettempdir()) / secrets.token_hex(20) + path.mkdir(parents=True, exist_ok=True) + path = path / x.get("orig_name", "output") + path.write_bytes(chunk) + file_name = str(path) + else: + data = x.get("data") + assert data is not None, f"The 'data' field is missing in {x}" + file_name = utils.decode_base64_to_file(data, dir=save_dir).name + else: + raise ValueError( + f"A FileSerializable component can only deserialize a 
string or a dict, not a {type(x)}: {x}" + ) + return file_name + + def serialize( + self, + x: str | FileData | None | list[str | FileData | None], + load_dir: str | Path = "", + allow_links: bool = False, + ) -> FileData | None | list[FileData | None]: + """ + Convert from human-friendly version of a file (string filepath) to a + serialized representation (base64) + Parameters: + x: String path to file to serialize + load_dir: Path to directory containing x + allow_links: Will allow path returns instead of raw file content + """ + if x is None or x == "": + return None + if isinstance(x, list): + return [self._serialize_single(f, load_dir, allow_links) for f in x] + else: + return self._serialize_single(x, load_dir, allow_links) + + def deserialize( + self, + x: str | FileData | None | list[str | FileData | None], + save_dir: Path | str | None = None, + root_url: str | None = None, + hf_token: str | None = None, + ) -> str | None | list[str | None]: + """ + Convert from serialized representation of a file (base64) to a human-friendly + version (string filepath). Optionally, save the file to the directory specified by `save_dir` + Parameters: + x: Base64 representation of file to deserialize into a string filepath + save_dir: Path to directory to save the deserialized file to + root_url: If this component is loaded from an external Space, this is the URL of the Space. 
+ hf_token: If this component is loaded from an external private Space, this is the access token for the Space + """ + if x is None: + return None + if isinstance(save_dir, Path): + save_dir = str(save_dir) + if isinstance(x, list): + return [ + self._deserialize_single( + f, save_dir=save_dir, root_url=root_url, hf_token=hf_token + ) + for f in x + ] + else: + return self._deserialize_single( + x, save_dir=save_dir, root_url=root_url, hf_token=hf_token + ) + + +class VideoSerializable(FileSerializable): + def serialized_info(self): + return {"type": "string", "description": "filepath or URL to video file"} + + def api_info(self) -> dict[str, dict | bool]: + return {"info": serializer_types["FileSerializable"], "serialized_info": True} + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": {"is_file": False, "data": media_data.BASE64_VIDEO}, + "serialized": "https://github.com/gradio-app/gradio/raw/main/test/test_files/video_sample.mp4", + } + + def serialize( + self, x: str | None, load_dir: str | Path = "", allow_links: bool = False + ) -> tuple[FileData | None, None]: + return (super().serialize(x, load_dir, allow_links), None) # type: ignore + + def deserialize( + self, + x: tuple[FileData | None, FileData | None] | None, + save_dir: Path | str | None = None, + root_url: str | None = None, + hf_token: str | None = None, + ) -> str | tuple[str | None, str | None] | None: + """ + Convert from serialized representation of a file (base64) to a human-friendly + version (string filepath). Optionally, save the file to the directory specified by `save_dir` + """ + if isinstance(x, (tuple, list)): + assert len(x) == 2, f"Expected tuple of length 2. Received: {x}" + x_as_list = [x[0], x[1]] + else: + raise ValueError(f"Expected tuple of length 2. 
Received: {x}") + deserialized_file = super().deserialize(x_as_list, save_dir, root_url, hf_token) # type: ignore + if isinstance(deserialized_file, list): + return deserialized_file[0] # ignore subtitles + + +class JSONSerializable(Serializable): + def serialized_info(self): + return {"type": "string", "description": "filepath to JSON file"} + + def api_info(self) -> dict[str, dict | bool]: + return {"info": serializer_types["JSONSerializable"], "serialized_info": True} + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": {"a": 1, "b": 2}, + "serialized": None, + } + + def serialize( + self, + x: str | None, + load_dir: str | Path = "", + allow_links: bool = False, + ) -> dict | list | None: + """ + Convert from a a human-friendly version (string path to json file) to a + serialized representation (json string) + Parameters: + x: String path to json file to read to get json string + load_dir: Path to directory containing x + """ + if x is None or x == "": + return None + return utils.file_to_json(Path(load_dir) / x) + + def deserialize( + self, + x: str | dict | list, + save_dir: str | Path | None = None, + root_url: str | None = None, + hf_token: str | None = None, + ) -> str | None: + """ + Convert from serialized representation (json string) to a human-friendly + version (string path to json file). 
Optionally, save the file to the directory specified by `save_dir` + Parameters: + x: Json string + save_dir: Path to save the deserialized json file to + root_url: Ignored + hf_token: Ignored + """ + if x is None: + return None + return utils.dict_or_str_to_json_file(x, dir=save_dir).name + + +class GallerySerializable(Serializable): + def serialized_info(self): + return { + "type": "string", + "description": "path to directory with images and a file associating images with captions called captions.json", + } + + def api_info(self) -> dict[str, dict | bool]: + return { + "info": serializer_types["GallerySerializable"], + "serialized_info": True, + } + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": [media_data.BASE64_IMAGE] * 2, + "serialized": [ + "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png", + ] + * 2, + } + + def serialize( + self, x: str | None, load_dir: str | Path = "", allow_links: bool = False + ) -> list[list[str | None]] | None: + if x is None or x == "": + return None + files = [] + captions_file = Path(x) / "captions.json" + with captions_file.open("r") as captions_json: + captions = json.load(captions_json) + for file_name, caption in captions.items(): + img = FileSerializable().serialize(file_name, allow_links=allow_links) + files.append([img, caption]) + return files + + def deserialize( + self, + x: list[list[str | None]] | None, + save_dir: str = "", + root_url: str | None = None, + hf_token: str | None = None, + ) -> None | str: + if x is None: + return None + gallery_path = Path(save_dir) / str(uuid.uuid4()) + gallery_path.mkdir(exist_ok=True, parents=True) + captions = {} + for img_data in x: + if isinstance(img_data, (list, tuple)): + img_data, caption = img_data + else: + caption = None + name = FileSerializable().deserialize( + img_data, gallery_path, root_url=root_url, hf_token=hf_token + ) + captions[name] = caption + captions_file = gallery_path / "captions.json" + with 
captions_file.open("w") as captions_json: + json.dump(captions, captions_json) + return os.path.abspath(gallery_path) + + +SERIALIZER_MAPPING = {} +for cls in Serializable.__subclasses__(): + SERIALIZER_MAPPING[cls.__name__] = cls + for subcls in cls.__subclasses__(): + SERIALIZER_MAPPING[subcls.__name__] = subcls + +SERIALIZER_MAPPING["Serializable"] = SimpleSerializable +SERIALIZER_MAPPING["File"] = FileSerializable +SERIALIZER_MAPPING["UploadButton"] = FileSerializable + +COMPONENT_MAPPING: dict[str, type] = { + "textbox": StringSerializable, + "number": NumberSerializable, + "slider": NumberSerializable, + "checkbox": BooleanSerializable, + "checkboxgroup": ListStringSerializable, + "radio": StringSerializable, + "dropdown": SimpleSerializable, + "image": ImgSerializable, + "video": FileSerializable, + "audio": FileSerializable, + "file": FileSerializable, + "dataframe": JSONSerializable, + "timeseries": JSONSerializable, + "state": SimpleSerializable, + "button": StringSerializable, + "uploadbutton": FileSerializable, + "colorpicker": StringSerializable, + "label": JSONSerializable, + "highlightedtext": JSONSerializable, + "json": JSONSerializable, + "html": StringSerializable, + "gallery": GallerySerializable, + "chatbot": JSONSerializable, + "model3d": FileSerializable, + "plot": JSONSerializable, + "barplot": JSONSerializable, + "lineplot": JSONSerializable, + "scatterplot": JSONSerializable, + "markdown": StringSerializable, + "code": StringSerializable, + "annotatedimage": JSONSerializable, +} diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/templates/discord_chat.py b/testbed/gradio-app__gradio/client/python/gradio_client/templates/discord_chat.py new file mode 100644 index 0000000000000000000000000000000000000000..881ff65fb5e5b4599cb213c6cfa8fa236b09b509 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/templates/discord_chat.py @@ -0,0 +1,193 @@ +import asyncio +import os +import threading +from threading 
import Event +from typing import Optional + +import discord +import gradio as gr +from discord import Permissions +from discord.ext import commands +from discord.utils import oauth_url + +import gradio_client as grc +from gradio_client.utils import QueueError + +event = Event() + +DISCORD_TOKEN = os.getenv("DISCORD_TOKEN") + + +async def wait(job): + while not job.done(): + await asyncio.sleep(0.2) + + +def get_client(session: Optional[str] = None) -> grc.Client: + client = grc.Client("<>", hf_token=os.getenv("HF_TOKEN")) + if session: + client.session_hash = session + return client + + +def truncate_response(response: str) -> str: + ending = "...\nTruncating response to 2000 characters due to discord api limits." + if len(response) > 2000: + return response[: 2000 - len(ending)] + ending + else: + return response + + +intents = discord.Intents.default() +intents.message_content = True +bot = commands.Bot(command_prefix="/", intents=intents) + + +@bot.event +async def on_ready(): + print(f"Logged in as {bot.user} (ID: {bot.user.id})") + synced = await bot.tree.sync() + print(f"Synced commands: {', '.join([s.name for s in synced])}.") + event.set() + print("------") + + +thread_to_client = {} +thread_to_user = {} + + +@bot.hybrid_command( + name="<>", + description="Enter some text to chat with the bot! Like this: /<> Hello, how are you?", +) +async def chat(ctx, prompt: str): + if ctx.author.id == bot.user.id: + return + try: + message = await ctx.send("Creating thread...") + + thread = await message.create_thread(name=prompt) + loop = asyncio.get_running_loop() + client = await loop.run_in_executor(None, get_client, None) + job = client.submit(prompt, api_name="/<>") + await wait(job) + + try: + job.result() + response = job.outputs()[-1] + await thread.send(truncate_response(response)) + thread_to_client[thread.id] = client + thread_to_user[thread.id] = ctx.author.id + except QueueError: + await thread.send( + "The gradio space powering this bot is really busy! 
Please try again later!" + ) + + except Exception as e: + print(f"{e}") + + +async def continue_chat(message): + """Continues a given conversation based on chathistory""" + try: + client = thread_to_client[message.channel.id] + prompt = message.content + job = client.submit(prompt, api_name="/<>") + await wait(job) + try: + job.result() + response = job.outputs()[-1] + await message.reply(truncate_response(response)) + except QueueError: + await message.reply( + "The gradio space powering this bot is really busy! Please try again later!" + ) + + except Exception as e: + print(f"Error: {e}") + + +@bot.event +async def on_message(message): + """Continue the chat""" + try: + if not message.author.bot: + if message.channel.id in thread_to_user: + if thread_to_user[message.channel.id] == message.author.id: + await continue_chat(message) + else: + await bot.process_commands(message) + + except Exception as e: + print(f"Error: {e}") + + +# running in thread +def run_bot(): + if not DISCORD_TOKEN: + print("DISCORD_TOKEN NOT SET") + event.set() + else: + bot.run(DISCORD_TOKEN) + + +threading.Thread(target=run_bot).start() + +event.wait() + +if not DISCORD_TOKEN: + welcome_message = """ + + ## You have not specified a DISCORD_TOKEN, which means you have not created a bot account. Please follow these steps: + + ### 1. Go to https://discord.com/developers/applications and click 'New Application' + + ### 2. Give your bot a name 🤖 + + ![](https://gradio-builds.s3.amazonaws.com/demo-files/discordbots/BotName.png) + + ## 3. In Settings > Bot, click the 'Reset Token' button to get a new token. Write it down and keep it safe 🔐 + + ![](https://gradio-builds.s3.amazonaws.com/demo-files/discordbots/ResetToken.png) + + ## 4. Optionally make the bot public if you want anyone to be able to add it to their servers + + ## 5. 
Scroll down and enable 'Message Content Intent' under 'Privileged Gateway Intents'
+
+    ![](https://gradio-builds.s3.amazonaws.com/demo-files/discordbots/MessageContentIntent.png)
+
+    ## 6. Save your changes!
+
+    ## 7. The token from step 3 is the DISCORD_TOKEN. Rerun the deploy_discord command, e.g client.deploy_discord(discord_bot_token=DISCORD_TOKEN, ...), or add the token as a space secret manually.
+"""
+else:
+    permissions = Permissions(326417525824)
+    url = oauth_url(bot.user.id, permissions=permissions)
+    welcome_message = f"""
+    ## Add this bot to your server by clicking this link: 
+
+    {url}
+
+    ## How to use it?
+
+    The bot can be triggered via `/<>` followed by your text prompt.
+
+    This will create a thread with the bot's response to your text prompt.
+    You can reply in the thread (without `/<>`) to continue the conversation.
+    In the thread, the bot will only reply to the original author of the command.
+
+    ⚠️ Note ⚠️: Please make sure this bot's command does not have the same name as another command in your server.
+
+    ⚠️ Note ⚠️: Bot commands do not work in DMs with the bot as of now. 
+ """ + + +with gr.Blocks() as demo: + gr.Markdown( + f""" + # Discord bot of <> + {welcome_message} + """ + ) + +demo.launch() diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/types.json b/testbed/gradio-app__gradio/client/python/gradio_client/types.json new file mode 100644 index 0000000000000000000000000000000000000000..8fe876126f6007aec0afca30f7a2b4c05ce1b232 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/types.json @@ -0,0 +1,199 @@ +{ + "SimpleSerializable": { + "type": {}, + "description": "any valid value" + }, + "StringSerializable": { + "type": "string" + }, + "ListStringSerializable": { + "type": "array", + "items": { + "type": "string" + } + }, + "BooleanSerializable": { + "type": "boolean" + }, + "NumberSerializable": { + "type": "number" + }, + "ImgSerializable": { + "type": "string", + "description": "base64 representation of an image" + }, + "FileSerializable": { + "oneOf": [ + { + "type": "string", + "description": "filepath or URL to file" + }, + { + "type": "object", + "properties": { + "name": { "type": "string", "description": "name of file" }, + "data": { + "type": "string", + "description": "base64 representation of file" + }, + "size": { + "type": "integer", + "description": "size of image in bytes" + }, + "is_file": { + "type": "boolean", + "description": "true if the file has been uploaded to the server" + }, + "orig_name": { + "type": "string", + "description": "original name of the file" + } + }, + "required": ["name", "data"] + }, + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string", + "description": "filepath or URL to file" + }, + { + "type": "object", + "properties": { + "name": { "type": "string", "description": "name of file" }, + "data": { + "type": "string", + "description": "base64 representation of file" + }, + "size": { + "type": "integer", + "description": "size of image in bytes" + }, + "is_file": { + "type": "boolean", + "description": "true if the file has 
been uploaded to the server" + }, + "orig_name": { + "type": "string", + "description": "original name of the file" + } + }, + "required": ["name", "data"] + } + ] + } + } + ] + }, + "SingleFileSerializable": { + "oneOf": [ + { + "type": "string", + "description": "filepath or URL to file" + }, + { + "type": "object", + "properties": { + "name": { "type": "string", "description": "name of file" }, + "data": { + "type": "string", + "description": "base64 representation of file" + }, + "size": { + "type": "integer", + "description": "size of image in bytes" + }, + "is_file": { + "type": "boolean", + "description": "true if the file has been uploaded to the server" + }, + "orig_name": { + "type": "string", + "description": "original name of the file" + } + }, + "required": ["name", "data"] + } + ] + }, + "MultipleFileSerializable": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string", + "description": "filepath or URL to file" + }, + { + "type": "object", + "properties": { + "name": { "type": "string", "description": "name of file" }, + "data": { + "type": "string", + "description": "base64 representation of file" + }, + "size": { + "type": "integer", + "description": "size of image in bytes" + }, + "is_file": { + "type": "boolean", + "description": "true if the file has been uploaded to the server" + }, + "orig_name": { + "type": "string", + "description": "original name of the file" + } + }, + "required": ["name", "data"] + } + ] + } + }, + "JSONSerializable": { + "type": {}, + "description": "any valid json" + }, + "GallerySerializable": { + "type": "array", + "items": { + "type": "array", + "items": false, + "maxSize": 2, + "minSize": 2, + "prefixItems": [ + { + "type": "object", + "properties": { + "name": { "type": "string", "description": "name of file" }, + "data": { + "type": "string", + "description": "base64 representation of file" + }, + "size": { + "type": "integer", + "description": "size of image in bytes" + }, + "is_file": { + "type": 
"boolean", + "description": "true if the file has been uploaded to the server" + }, + "orig_name": { + "type": "string", + "description": "original name of the file" + } + }, + "required": ["name", "data"] + }, + { + "oneOf": [ + { "type": "string", "description": "caption of image" }, + { "type": "null" } + ] + } + ] + } + } +} diff --git a/testbed/gradio-app__gradio/client/python/gradio_client/utils.py b/testbed/gradio-app__gradio/client/python/gradio_client/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..421ff11ac121510789053645f39cc11da1e07bfd --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/gradio_client/utils.py @@ -0,0 +1,598 @@ +from __future__ import annotations + +import asyncio +import base64 +import json +import mimetypes +import os +import pkgutil +import secrets +import shutil +import tempfile +import warnings +from concurrent.futures import CancelledError +from dataclasses import dataclass, field +from datetime import datetime +from enum import Enum +from pathlib import Path +from threading import Lock +from typing import Any, Callable, Optional + +import fsspec.asyn +import httpx +import huggingface_hub +import requests +from huggingface_hub import SpaceStage +from websockets.legacy.protocol import WebSocketCommonProtocol + +API_URL = "api/predict/" +WS_URL = "queue/join" +UPLOAD_URL = "upload" +CONFIG_URL = "config" +API_INFO_URL = "info" +RAW_API_INFO_URL = "info?serialize=False" +SPACE_FETCHER_URL = "https://gradio-space-api-fetcher-v2.hf.space/api" +RESET_URL = "reset" +SPACE_URL = "https://hf.space/{}" + +SKIP_COMPONENTS = { + "state", + "row", + "column", + "tabs", + "tab", + "tabitem", + "box", + "form", + "accordion", + "group", + "interpretation", + "dataset", +} +STATE_COMPONENT = "state" +INVALID_RUNTIME = [ + SpaceStage.NO_APP_FILE, + SpaceStage.CONFIG_ERROR, + SpaceStage.BUILD_ERROR, + SpaceStage.RUNTIME_ERROR, + SpaceStage.PAUSED, +] + + +def get_package_version() -> str: + try: + 
package_json_data = ( + pkgutil.get_data(__name__, "package.json").decode("utf-8").strip() # type: ignore + ) + package_data = json.loads(package_json_data) + version = package_data.get("version", "") + return version + except Exception: + return "" + + +__version__ = get_package_version() + + +class TooManyRequestsError(Exception): + """Raised when the API returns a 429 status code.""" + + pass + + +class QueueError(Exception): + """Raised when the queue is full or there is an issue adding a job to the queue.""" + + pass + + +class InvalidAPIEndpointError(Exception): + """Raised when the API endpoint is invalid.""" + + pass + + +class SpaceDuplicationError(Exception): + """Raised when something goes wrong with a Space Duplication.""" + + pass + + +class Status(Enum): + """Status codes presented to client users.""" + + STARTING = "STARTING" + JOINING_QUEUE = "JOINING_QUEUE" + QUEUE_FULL = "QUEUE_FULL" + IN_QUEUE = "IN_QUEUE" + SENDING_DATA = "SENDING_DATA" + PROCESSING = "PROCESSING" + ITERATING = "ITERATING" + PROGRESS = "PROGRESS" + FINISHED = "FINISHED" + CANCELLED = "CANCELLED" + + @staticmethod + def ordering(status: Status) -> int: + """Order of messages. 
Helpful for testing.""" + order = [ + Status.STARTING, + Status.JOINING_QUEUE, + Status.QUEUE_FULL, + Status.IN_QUEUE, + Status.SENDING_DATA, + Status.PROCESSING, + Status.PROGRESS, + Status.ITERATING, + Status.FINISHED, + Status.CANCELLED, + ] + return order.index(status) + + def __lt__(self, other: Status): + return self.ordering(self) < self.ordering(other) + + @staticmethod + def msg_to_status(msg: str) -> Status: + """Map the raw message from the backend to the status code presented to users.""" + return { + "send_hash": Status.JOINING_QUEUE, + "queue_full": Status.QUEUE_FULL, + "estimation": Status.IN_QUEUE, + "send_data": Status.SENDING_DATA, + "process_starts": Status.PROCESSING, + "process_generating": Status.ITERATING, + "process_completed": Status.FINISHED, + "progress": Status.PROGRESS, + }[msg] + + +@dataclass +class ProgressUnit: + index: Optional[int] + length: Optional[int] + unit: Optional[str] + progress: Optional[float] + desc: Optional[str] + + @classmethod + def from_ws_msg(cls, data: list[dict]) -> list[ProgressUnit]: + return [ + cls( + index=d.get("index"), + length=d.get("length"), + unit=d.get("unit"), + progress=d.get("progress"), + desc=d.get("desc"), + ) + for d in data + ] + + +@dataclass +class StatusUpdate: + """Update message sent from the worker thread to the Job on the main thread.""" + + code: Status + rank: int | None + queue_size: int | None + eta: float | None + success: bool | None + time: datetime | None + progress_data: list[ProgressUnit] | None + + +def create_initial_status_update(): + return StatusUpdate( + code=Status.STARTING, + rank=None, + queue_size=None, + eta=None, + success=None, + time=datetime.now(), + progress_data=None, + ) + + +@dataclass +class JobStatus: + """The job status. + + Keeps track of the latest status update and intermediate outputs (not yet implements). 
+ """ + + latest_status: StatusUpdate = field(default_factory=create_initial_status_update) + outputs: list[Any] = field(default_factory=list) + + +@dataclass +class Communicator: + """Helper class to help communicate between the worker thread and main thread.""" + + lock: Lock + job: JobStatus + prediction_processor: Callable[..., tuple] + reset_url: str + should_cancel: bool = False + + +######################## +# Network utils +######################## + + +def is_http_url_like(possible_url: str) -> bool: + """ + Check if the given string looks like an HTTP(S) URL. + """ + return possible_url.startswith(("http://", "https://")) + + +def probe_url(possible_url: str) -> bool: + """ + Probe the given URL to see if it responds with a 200 status code (to HEAD, then to GET). + """ + headers = {"User-Agent": "gradio (https://gradio.app/; team@gradio.app)"} + try: + with requests.session() as sess: + head_request = sess.head(possible_url, headers=headers) + if head_request.status_code == 405: + return sess.get(possible_url, headers=headers).ok + return head_request.ok + except Exception: + return False + + +def is_valid_url(possible_url: str) -> bool: + """ + Check if the given string is a valid URL. + """ + warnings.warn( + "is_valid_url should not be used. 
" + "Use is_http_url_like() and probe_url(), as suitable, instead.", + ) + return is_http_url_like(possible_url) and probe_url(possible_url) + + +async def get_pred_from_ws( + websocket: WebSocketCommonProtocol, + data: str, + hash_data: str, + helper: Communicator | None = None, +) -> dict[str, Any]: + completed = False + resp = {} + while not completed: + # Receive message in the background so that we can + # cancel even while running a long pred + task = asyncio.create_task(websocket.recv()) + while not task.done(): + if helper: + with helper.lock: + if helper.should_cancel: + # Need to reset the iterator state since the client + # will not reset the session + async with httpx.AsyncClient() as http: + reset = http.post( + helper.reset_url, json=json.loads(hash_data) + ) + # Retrieve cancel exception from task + # otherwise will get nasty warning in console + task.cancel() + await asyncio.gather(task, reset, return_exceptions=True) + raise CancelledError() + # Need to suspend this coroutine so that task actually runs + await asyncio.sleep(0.01) + msg = task.result() + resp = json.loads(msg) + if helper: + with helper.lock: + has_progress = "progress_data" in resp + status_update = StatusUpdate( + code=Status.msg_to_status(resp["msg"]), + queue_size=resp.get("queue_size"), + rank=resp.get("rank", None), + success=resp.get("success"), + time=datetime.now(), + eta=resp.get("rank_eta"), + progress_data=ProgressUnit.from_ws_msg(resp["progress_data"]) + if has_progress + else None, + ) + output = resp.get("output", {}).get("data", []) + if output and status_update.code != Status.FINISHED: + try: + result = helper.prediction_processor(*output) + except Exception as e: + result = [e] + helper.job.outputs.append(result) + helper.job.latest_status = status_update + if resp["msg"] == "queue_full": + raise QueueError("Queue is full! 
Please try again.") + if resp["msg"] == "send_hash": + await websocket.send(hash_data) + elif resp["msg"] == "send_data": + await websocket.send(data) + completed = resp["msg"] == "process_completed" + return resp["output"] + + +######################## +# Data processing utils +######################## + + +def download_tmp_copy_of_file( + url_path: str, hf_token: str | None = None, dir: str | None = None +) -> str: + if dir is not None: + os.makedirs(dir, exist_ok=True) + headers = {"Authorization": "Bearer " + hf_token} if hf_token else {} + directory = Path(dir or tempfile.gettempdir()) / secrets.token_hex(20) + directory.mkdir(exist_ok=True, parents=True) + file_path = directory / Path(url_path).name + + with requests.get(url_path, headers=headers, stream=True) as r: + r.raise_for_status() + with open(file_path, "wb") as f: + shutil.copyfileobj(r.raw, f) + return str(file_path.resolve()) + + +def create_tmp_copy_of_file(file_path: str, dir: str | None = None) -> str: + directory = Path(dir or tempfile.gettempdir()) / secrets.token_hex(20) + directory.mkdir(exist_ok=True, parents=True) + dest = directory / Path(file_path).name + shutil.copy2(file_path, dest) + return str(dest.resolve()) + + +def get_mimetype(filename: str) -> str | None: + if filename.endswith(".vtt"): + return "text/vtt" + mimetype = mimetypes.guess_type(filename)[0] + if mimetype is not None: + mimetype = mimetype.replace("x-wav", "wav").replace("x-flac", "flac") + return mimetype + + +def get_extension(encoding: str) -> str | None: + encoding = encoding.replace("audio/wav", "audio/x-wav") + type = mimetypes.guess_type(encoding)[0] + if type == "audio/flac": # flac is not supported by mimetypes + return "flac" + elif type is None: + return None + extension = mimetypes.guess_extension(type) + if extension is not None and extension.startswith("."): + extension = extension[1:] + return extension + + +def encode_file_to_base64(f: str | Path): + with open(f, "rb") as file: + encoded_string = 
base64.b64encode(file.read()) + base64_str = str(encoded_string, "utf-8") + mimetype = get_mimetype(str(f)) + return ( + "data:" + + (mimetype if mimetype is not None else "") + + ";base64," + + base64_str + ) + + +def encode_url_to_base64(url: str): + resp = requests.get(url) + resp.raise_for_status() + encoded_string = base64.b64encode(resp.content) + base64_str = str(encoded_string, "utf-8") + mimetype = get_mimetype(url) + return ( + "data:" + (mimetype if mimetype is not None else "") + ";base64," + base64_str + ) + + +def encode_url_or_file_to_base64(path: str | Path): + path = str(path) + if is_http_url_like(path): + return encode_url_to_base64(path) + return encode_file_to_base64(path) + + +def download_byte_stream(url: str, hf_token=None): + arr = bytearray() + headers = {"Authorization": "Bearer " + hf_token} if hf_token else {} + with httpx.stream("GET", url, headers=headers) as r: + for data in r.iter_bytes(): + arr += data + yield data + yield arr + + +def decode_base64_to_binary(encoding: str) -> tuple[bytes, str | None]: + extension = get_extension(encoding) + data = encoding.rsplit(",", 1)[-1] + return base64.b64decode(data), extension + + +def strip_invalid_filename_characters(filename: str, max_bytes: int = 200) -> str: + """Strips invalid characters from a filename and ensures that the file_length is less than `max_bytes` bytes.""" + filename = "".join([char for char in filename if char.isalnum() or char in "._- "]) + filename_len = len(filename.encode()) + if filename_len > max_bytes: + while filename_len > max_bytes: + if len(filename) == 0: + break + filename = filename[:-1] + filename_len = len(filename.encode()) + return filename + + +def sanitize_parameter_names(original_name: str) -> str: + """Cleans up a Python parameter name to make the API info more readable.""" + return ( + "".join([char for char in original_name if char.isalnum() or char in " _"]) + .replace(" ", "_") + .lower() + ) + + +def decode_base64_to_file( + encoding: str, + 
file_path: str | None = None, + dir: str | Path | None = None, + prefix: str | None = None, +): + directory = Path(dir or tempfile.gettempdir()) / secrets.token_hex(20) + directory.mkdir(exist_ok=True, parents=True) + data, extension = decode_base64_to_binary(encoding) + if file_path is not None and prefix is None: + filename = Path(file_path).name + prefix = filename + if "." in filename: + prefix = filename[0 : filename.index(".")] + extension = filename[filename.index(".") + 1 :] + + if prefix is not None: + prefix = strip_invalid_filename_characters(prefix) + + if extension is None: + file_obj = tempfile.NamedTemporaryFile( + delete=False, prefix=prefix, dir=directory + ) + else: + file_obj = tempfile.NamedTemporaryFile( + delete=False, + prefix=prefix, + suffix="." + extension, + dir=directory, + ) + file_obj.write(data) + file_obj.flush() + return file_obj + + +def dict_or_str_to_json_file(jsn: str | dict | list, dir: str | Path | None = None): + if dir is not None: + os.makedirs(dir, exist_ok=True) + + file_obj = tempfile.NamedTemporaryFile( + delete=False, suffix=".json", dir=dir, mode="w+" + ) + if isinstance(jsn, str): + jsn = json.loads(jsn) + json.dump(jsn, file_obj) + file_obj.flush() + return file_obj + + +def file_to_json(file_path: str | Path) -> dict | list: + with open(file_path) as f: + return json.load(f) + + +########################### +# HuggingFace Hub API Utils +########################### +def set_space_timeout( + space_id: str, + hf_token: str | None = None, + timeout_in_seconds: int = 300, +): + headers = huggingface_hub.utils.build_hf_headers( + token=hf_token, + library_name="gradio_client", + library_version=__version__, + ) + req = requests.post( + f"https://huggingface.co/api/spaces/{space_id}/sleeptime", + json={"seconds": timeout_in_seconds}, + headers=headers, + ) + try: + huggingface_hub.utils.hf_raise_for_status(req) + except huggingface_hub.utils.HfHubHTTPError as err: + raise SpaceDuplicationError( + f"Could not set sleep 
timeout on duplicated Space. Please visit {SPACE_URL.format(space_id)} " + "to set a timeout manually to reduce billing charges." + ) from err + + +######################## +# Misc utils +######################## + + +def synchronize_async(func: Callable, *args, **kwargs) -> Any: + """ + Runs async functions in sync scopes. Can be used in any scope. + + Example: + if inspect.iscoroutinefunction(block_fn.fn): + predictions = utils.synchronize_async(block_fn.fn, *processed_input) + + Args: + func: + *args: + **kwargs: + """ + return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs) # type: ignore + + +class APIInfoParseError(ValueError): + pass + + +def get_type(schema: dict): + if "type" in schema: + return schema["type"] + elif schema.get("oneOf"): + return "oneOf" + elif schema.get("anyOf"): + return "anyOf" + else: + raise APIInfoParseError(f"Cannot parse type for {schema}") + + +def json_schema_to_python_type(schema: Any) -> str: + """Convert the json schema into a python type hint""" + type_ = get_type(schema) + if type_ == {}: + if "json" in schema["description"]: + return "Dict[Any, Any]" + else: + return "Any" + elif type_ == "null": + return "None" + elif type_ == "integer": + return "int" + elif type_ == "string": + return "str" + elif type_ == "boolean": + return "bool" + elif type_ == "number": + return "int | float" + elif type_ == "array": + items = schema.get("items") + if "prefixItems" in items: + elements = ", ".join( + [json_schema_to_python_type(i) for i in items["prefixItems"]] + ) + return f"Tuple[{elements}]" + else: + elements = json_schema_to_python_type(items) + return f"List[{elements}]" + elif type_ == "object": + des = ", ".join( + [ + f"{n}: {json_schema_to_python_type(v)} ({v.get('description')})" + for n, v in schema["properties"].items() + ] + ) + return f"Dict({des})" + elif type_ in ["oneOf", "anyOf"]: + desc = " | ".join([json_schema_to_python_type(i) for i in schema[type_]]) + return desc + else: + raise 
APIInfoParseError(f"Cannot parse schema {schema}") diff --git a/testbed/gradio-app__gradio/client/python/pyproject.toml b/testbed/gradio-app__gradio/client/python/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..2a4f5f4088c48fc6c4d9ef0492f08543b7fac2c8 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/pyproject.toml @@ -0,0 +1,70 @@ +[build-system] +requires = ["hatchling", "hatch-requirements-txt", "hatch-fancy-pypi-readme>=22.5.0"] +build-backend = "hatchling.build" + +[project] +name = "gradio_client" +dynamic = ["version", "dependencies", "readme"] +description = "Python library for easily interacting with trained machine learning models" +license = "Apache-2.0" +requires-python = ">=3.8" +authors = [ + { name = "Abubakar Abid", email = "team@gradio.app" }, + { name = "Ali Abid", email = "team@gradio.app" }, + { name = "Ali Abdalla", email = "team@gradio.app" }, + { name = "Dawood Khan", email = "team@gradio.app" }, + { name = "Ahsen Khaliq", email = "team@gradio.app" }, + { name = "Pete Allen", email = "team@gradio.app" }, + { name = "Freddy Boulton", email = "team@gradio.app" }, +] +keywords = ["machine learning", "client", "API"] + +classifiers = [ + 'Development Status :: 4 - Beta', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3 :: Only', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Topic :: Scientific/Engineering', + 'Topic :: Scientific/Engineering :: Artificial Intelligence', + 'Topic :: Software Development :: User Interfaces', +] + +[project.urls] +Homepage = "https://github.com/gradio-app/gradio" + +[tool.hatch.version] +path = "gradio_client/package.json" +pattern = ".*\"version\":\\s*\"(?P[^\"]+)\"" + +[tool.hatch.metadata.hooks.requirements_txt] 
+filename = "requirements.txt" + +[tool.hatch.metadata.hooks.fancy-pypi-readme] +content-type = "text/markdown" +fragments = [ + { path = "README.md" }, +] + +[tool.hatch.build.targets.sdist] +include = [ + "/gradio_client", + "/README.md", + "/requirements.txt", +] + +[tool.ruff] +extend = "../../pyproject.toml" + +[tool.ruff.isort] +known-first-party = [ + "gradio_client" +] + +[tool.pytest.ini_options] +GRADIO_ANALYTICS_ENABLED = "False" +HF_HUB_DISABLE_TELEMETRY = "1" \ No newline at end of file diff --git a/testbed/gradio-app__gradio/client/python/requirements.txt b/testbed/gradio-app__gradio/client/python/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..462527ae45b8f6e3f83e8bd37a5851561f1a0e2c --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/requirements.txt @@ -0,0 +1,7 @@ +fsspec +httpx +huggingface_hub>=0.13.0 +packaging +requests~=2.0 +typing_extensions~=4.0 +websockets>=10.0,<12.0 diff --git a/testbed/gradio-app__gradio/client/python/scripts/build_pypi.sh b/testbed/gradio-app__gradio/client/python/scripts/build_pypi.sh new file mode 100644 index 0000000000000000000000000000000000000000..23e635237237de0cefddb5575fb02cf036af854d --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/scripts/build_pypi.sh @@ -0,0 +1,9 @@ +#!/bin/bash +set -e + +cd "$(dirname ${0})/.." 
+ +python3 -m pip install build +rm -rf dist/* +rm -rf build/* +python3 -m build diff --git a/testbed/gradio-app__gradio/client/python/scripts/check_pypi.py b/testbed/gradio-app__gradio/client/python/scripts/check_pypi.py new file mode 100644 index 0000000000000000000000000000000000000000..8f2cf69930b9fd7685c5e3b02893edfb7158523f --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/scripts/check_pypi.py @@ -0,0 +1,17 @@ +import json +import sys +import urllib.request +from pathlib import Path + +version_file = Path(__file__).parent.parent / "gradio_client" / "package.json" +with version_file.open() as f: + version = json.load(f)["version"] + +with urllib.request.urlopen("https://pypi.org/pypi/gradio_client/json") as url: + releases = json.load(url)["releases"] + +if version in releases: + print(f"Version {version} already exists on PyPI") + sys.exit(1) +else: + print(f"Version {version} does not exist on PyPI") diff --git a/testbed/gradio-app__gradio/client/python/scripts/format.sh b/testbed/gradio-app__gradio/client/python/scripts/format.sh new file mode 100644 index 0000000000000000000000000000000000000000..c78fa8efbc5834bc317e007cd095ff6efca09262 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/scripts/format.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +cd "$(dirname ${0})/.." + +echo "Formatting the client library.. Our style follows the Black code style." +python -m ruff --fix . +python -m black . + +echo "Type checking the client library with pyright" +python -m pyright gradio_client/*.py diff --git a/testbed/gradio-app__gradio/client/python/scripts/lint.sh b/testbed/gradio-app__gradio/client/python/scripts/lint.sh new file mode 100644 index 0000000000000000000000000000000000000000..c2207f3ff9ab0e556129841f7632eb5686de19e7 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/scripts/lint.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +cd "$(dirname ${0})/.." + +echo "Linting..." 
+python -m ruff test gradio_client +python -m black --check test gradio_client + +echo "Type checking the client library with pyright" +python -m pyright gradio_client/*.py diff --git a/testbed/gradio-app__gradio/client/python/scripts/test.sh b/testbed/gradio-app__gradio/client/python/scripts/test.sh new file mode 100644 index 0000000000000000000000000000000000000000..73c27ba7efc51d4ec9309430e1c577eab51370b7 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/scripts/test.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e + +cd "$(dirname ${0})/.." + +echo "Testing..." +python -m pytest test diff --git a/testbed/gradio-app__gradio/client/python/test/conftest.py b/testbed/gradio-app__gradio/client/python/test/conftest.py new file mode 100644 index 0000000000000000000000000000000000000000..13a3d237501a469681f86f9484155cebade0d5d0 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/test/conftest.py @@ -0,0 +1,344 @@ +import inspect +import random +import time + +import gradio as gr +import pytest +from pydub import AudioSegment + + +def pytest_configure(config): + config.addinivalue_line( + "markers", "flaky: mark test as flaky. 
Failure will not cause te" + ) + + +@pytest.fixture +def calculator_demo(): + def calculator(num1, operation, num2): + if operation == "add": + return num1 + num2 + elif operation == "subtract": + return num1 - num2 + elif operation == "multiply": + return num1 * num2 + elif operation == "divide": + if num2 == 0: + raise gr.Error("Cannot divide by zero!") + return num1 / num2 + + demo = gr.Interface( + calculator, + ["number", gr.Radio(["add", "subtract", "multiply", "divide"]), "number"], + "number", + examples=[ + [5, "add", 3], + [4, "divide", 2], + [-4, "multiply", 2.5], + [0, "subtract", 1.2], + ], + ) + return demo.queue() + + +@pytest.fixture +def state_demo(): + demo = gr.Interface( + lambda x, y: (x, y), + ["textbox", "state"], + ["textbox", "state"], + ) + return demo.queue() + + +@pytest.fixture +def increment_demo(): + with gr.Blocks() as demo: + btn1 = gr.Button("Increment") + btn2 = gr.Button("Increment") + btn3 = gr.Button("Increment") + numb = gr.Number() + + state = gr.State(0) + + btn1.click( + lambda x: (x + 1, x + 1), + state, + [state, numb], + api_name="increment_with_queue", + ) + btn2.click( + lambda x: (x + 1, x + 1), + state, + [state, numb], + queue=False, + api_name="increment_without_queue", + ) + btn3.click( + lambda x: (x + 1, x + 1), + state, + [state, numb], + api_name=False, + ) + + return demo.queue() + + +@pytest.fixture +def progress_demo(): + def my_function(x, progress=gr.Progress()): + progress(0, desc="Starting...") + for _ in progress.tqdm(range(20)): + time.sleep(0.1) + return x + + return gr.Interface(my_function, gr.Textbox(), gr.Textbox()).queue() + + +@pytest.fixture +def yield_demo(): + def spell(x): + for i in range(len(x)): + time.sleep(0.5) + yield x[:i] + + return gr.Interface(spell, "textbox", "textbox").queue() + + +@pytest.fixture +def cancel_from_client_demo(): + def iteration(): + for i in range(20): + print(f"i: {i}") + yield i + time.sleep(0.5) + + def long_process(): + time.sleep(10) + print("DONE!") + 
return 10 + + with gr.Blocks() as demo: + num = gr.Number() + + btn = gr.Button(value="Iterate") + btn.click(iteration, None, num, api_name="iterate") + btn2 = gr.Button(value="Long Process") + btn2.click(long_process, None, num, api_name="long") + + return demo.queue(concurrency_count=40) + + +@pytest.fixture +def sentiment_classification_demo(): + def classifier(text): + return {label: random.random() for label in ["POSITIVE", "NEGATIVE", "NEUTRAL"]} + + def sleep_for_test(): + time.sleep(10) + return 2 + + with gr.Blocks(theme="gstaff/xkcd") as demo: + with gr.Row(): + with gr.Column(): + input_text = gr.Textbox(label="Input Text") + with gr.Row(): + classify = gr.Button("Classify Sentiment") + with gr.Column(): + label = gr.Label(label="Predicted Sentiment") + number = gr.Number() + btn = gr.Button("Sleep then print") + classify.click(classifier, input_text, label, api_name="classify") + btn.click(sleep_for_test, None, number, api_name="sleep") + + return demo + + +@pytest.fixture +def count_generator_demo(): + def count(n): + for i in range(int(n)): + time.sleep(0.5) + yield i + + def show(n): + return str(list(range(int(n)))) + + with gr.Blocks() as demo: + with gr.Column(): + num = gr.Number(value=10) + with gr.Row(): + count_btn = gr.Button("Count") + list_btn = gr.Button("List") + with gr.Column(): + out = gr.Textbox() + + count_btn.click(count, num, out) + list_btn.click(show, num, out) + + return demo.queue() + + +@pytest.fixture +def count_generator_demo_exception(): + def count(n): + for i in range(int(n)): + time.sleep(0.1) + if i == 5: + raise ValueError("Oh no!") + yield i + + def show(n): + return str(list(range(int(n)))) + + with gr.Blocks() as demo: + with gr.Column(): + num = gr.Number(value=10) + with gr.Row(): + count_btn = gr.Button("Count") + count_forever = gr.Button("Count forever") + with gr.Column(): + out = gr.Textbox() + + count_btn.click(count, num, out, api_name="count") + count_forever.click(show, num, out, api_name="count_forever", 
every=3) + return demo.queue() + + +@pytest.fixture +def file_io_demo(): + demo = gr.Interface( + lambda x: print("foox"), + [gr.File(file_count="multiple"), "file"], + [gr.File(file_count="multiple"), "file"], + ) + + return demo + + +@pytest.fixture +def stateful_chatbot(): + with gr.Blocks() as demo: + chatbot = gr.Chatbot() + msg = gr.Textbox() + clear = gr.Button("Clear") + st = gr.State([1, 2, 3]) + + def respond(message, st, chat_history): + assert st[0] == 1 and st[1] == 2 and st[2] == 3 + bot_message = "I love you" + chat_history.append((message, bot_message)) + return "", chat_history + + msg.submit(respond, [msg, st, chatbot], [msg, chatbot], api_name="submit") + clear.click(lambda: None, None, chatbot, queue=False) + demo.queue() + return demo + + +@pytest.fixture +def hello_world_with_group(): + with gr.Blocks() as demo: + name = gr.Textbox(label="name") + output = gr.Textbox(label="greeting") + greet = gr.Button("Greet") + show_group = gr.Button("Show group") + with gr.Group(visible=False) as group: + gr.Textbox("Hello!") + + def greeting(name): + return f"Hello {name}", gr.Group.update(visible=True) + + greet.click( + greeting, inputs=[name], outputs=[output, group], api_name="greeting" + ) + show_group.click( + lambda: gr.Group.update(visible=False), None, group, api_name="show_group" + ) + return demo + + +@pytest.fixture +def hello_world_with_state_and_accordion(): + with gr.Blocks() as demo: + with gr.Row(): + name = gr.Textbox(label="name") + output = gr.Textbox(label="greeting") + num = gr.Number(label="count") + with gr.Row(): + n_counts = gr.State(value=0) + greet = gr.Button("Greet") + open_acc = gr.Button("Open acc") + close_acc = gr.Button("Close acc") + with gr.Accordion(label="Extra stuff", open=False) as accordion: + gr.Textbox("Hello!") + + def greeting(name, state): + state += 1 + return state, f"Hello {name}", state, gr.Accordion.update(open=False) + + greet.click( + greeting, + inputs=[name, n_counts], + outputs=[n_counts, output, 
num, accordion], + api_name="greeting", + ) + open_acc.click( + lambda state: (state + 1, state + 1, gr.Accordion.update(open=True)), + [n_counts], + [n_counts, num, accordion], + api_name="open", + ) + close_acc.click( + lambda state: (state + 1, state + 1, gr.Accordion.update(open=False)), + [n_counts], + [n_counts, num, accordion], + api_name="close", + ) + return demo + + +@pytest.fixture +def stream_audio(): + import pathlib + import tempfile + + def _stream_audio(audio_file): + audio = AudioSegment.from_mp3(audio_file) + i = 0 + chunk_size = 3000 + + while chunk_size * i < len(audio): + chunk = audio[chunk_size * i : chunk_size * (i + 1)] + i += 1 + if chunk: + file = str(pathlib.Path(tempfile.gettempdir()) / f"{i}.wav") + chunk.export(file, format="wav") + yield file + + return gr.Interface( + fn=_stream_audio, + inputs=gr.Audio(type="filepath", label="Audio file to stream"), + outputs=gr.Audio(autoplay=True, streaming=True), + ).queue() + + +@pytest.fixture +def all_components(): + classes_to_check = gr.components.Component.__subclasses__() + subclasses = [] + + while classes_to_check: + subclass = classes_to_check.pop() + children = subclass.__subclasses__() + + if children: + classes_to_check.extend(children) + if ( + "value" in inspect.signature(subclass).parameters + and subclass != gr.components.IOComponent + and not getattr(subclass, "is_template", False) + ): + subclasses.append(subclass) + + return subclasses diff --git a/testbed/gradio-app__gradio/client/python/test/requirements.txt b/testbed/gradio-app__gradio/client/python/test/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..064b9322508e86e5794f4f8a71e4ac74fe2079a6 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/test/requirements.txt @@ -0,0 +1,7 @@ +black==23.3.0 +pytest-asyncio +pytest==7.1.2 +ruff==0.0.264 +pyright==1.1.305 +gradio +pydub==0.25.1 diff --git a/testbed/gradio-app__gradio/client/python/test/test_client.py 
b/testbed/gradio-app__gradio/client/python/test/test_client.py new file mode 100644 index 0000000000000000000000000000000000000000..2e55f177ebf88677f3ef7792987066a9b9b712ed --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/test/test_client.py @@ -0,0 +1,1108 @@ +import json +import pathlib +import tempfile +import time +import uuid +from concurrent.futures import CancelledError, TimeoutError +from contextlib import contextmanager +from datetime import datetime, timedelta +from pathlib import Path +from unittest.mock import MagicMock, patch + +import gradio as gr +import huggingface_hub +import pytest +import uvicorn +from fastapi import FastAPI +from gradio.networking import Server +from huggingface_hub.utils import RepositoryNotFoundError + +from gradio_client import Client +from gradio_client.client import DEFAULT_TEMP_DIR +from gradio_client.serializing import Serializable +from gradio_client.utils import Communicator, ProgressUnit, Status, StatusUpdate + +HF_TOKEN = "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" # Intentionally revealing this key for testing purposes + + +@contextmanager +def connect(demo: gr.Blocks, serialize: bool = True): + _, local_url, _ = demo.launch(prevent_thread_lock=True) + try: + yield Client(local_url, serialize=serialize) + finally: + # A more verbose version of .close() + # because we should set a timeout + # the tests that call .cancel() can get stuck + # waiting for the thread to join + if demo.enable_queue: + demo._queue.close() + demo.is_running = False + demo.server.should_exit = True + demo.server.thread.join(timeout=1) + + +class TestClientPredictions: + @pytest.mark.flaky + def test_raise_error_invalid_state(self): + with pytest.raises(ValueError, match="invalid state"): + Client("gradio-tests/paused-space") + + @pytest.mark.flaky + def test_numerical_to_label_space(self): + client = Client("gradio-tests/titanic-survival") + with open(client.predict("male", 77, 10, api_name="/predict")) as f: + assert 
json.load(f)["label"] == "Perishes" + with pytest.raises( + ValueError, + match="This Gradio app might have multiple endpoints. Please specify an `api_name` or `fn_index`", + ): + client.predict("male", 77, 10) + with pytest.raises( + ValueError, + match="Cannot find a function with `api_name`: predict. Did you mean to use a leading slash?", + ): + client.predict("male", 77, 10, api_name="predict") + + @pytest.mark.flaky + def test_private_space(self): + client = Client("gradio-tests/not-actually-private-space", hf_token=HF_TOKEN) + output = client.predict("abc", api_name="/predict") + assert output == "abc" + + def test_state(self, increment_demo): + with connect(increment_demo) as client: + output = client.predict(api_name="/increment_without_queue") + assert output == 1 + output = client.predict(api_name="/increment_without_queue") + assert output == 2 + output = client.predict(api_name="/increment_without_queue") + assert output == 3 + client.reset_session() + output = client.predict(api_name="/increment_without_queue") + assert output == 1 + output = client.predict(api_name="/increment_with_queue") + assert output == 2 + client.reset_session() + output = client.predict(api_name="/increment_with_queue") + assert output == 1 + output = client.predict(api_name="/increment_with_queue") + assert output == 2 + + def test_job_status(self, calculator_demo): + with connect(calculator_demo) as client: + statuses = [] + job = client.submit(5, "add", 4, api_name="/predict") + while not job.done(): + time.sleep(0.1) + statuses.append(job.status()) + + assert statuses + # Messages are sorted by time + assert sorted([s.time for s in statuses if s]) == [ + s.time for s in statuses if s + ] + assert sorted([s.code for s in statuses if s]) == [ + s.code for s in statuses if s + ] + + @pytest.mark.flaky + def test_job_status_queue_disabled(self, sentiment_classification_demo): + with connect(sentiment_classification_demo) as client: + statuses = [] + job = client.submit("I love 
the gradio python client", api_name="/classify") + while not job.done(): + time.sleep(0.02) + statuses.append(job.status()) + statuses.append(job.status()) + assert all(s.code in [Status.PROCESSING, Status.FINISHED] for s in statuses) + assert not any(s.progress_data for s in statuses) + + @pytest.mark.flaky + def test_intermediate_outputs(self, count_generator_demo): + with connect(count_generator_demo) as client: + job = client.submit(3, fn_index=0) + + while not job.done(): + time.sleep(0.1) + + assert job.outputs() == [str(i) for i in range(3)] + + outputs = [] + for o in client.submit(3, fn_index=0): + outputs.append(o) + assert outputs == [str(i) for i in range(3)] + + @pytest.mark.flaky + def test_intermediate_outputs_with_exception(self, count_generator_demo_exception): + with connect(count_generator_demo_exception) as client: + with pytest.raises(Exception): + client.predict(7, api_name="/count") + + with pytest.raises( + ValueError, match="Cannot call predict on this function" + ): + client.predict(5, api_name="/count_forever") + + def test_break_in_loop_if_error(self, calculator_demo): + with connect(calculator_demo) as client: + job = client.submit("foo", "add", 4, fn_index=0) + output = list(job) + assert output == [] + + @pytest.mark.flaky + def test_timeout(self, sentiment_classification_demo): + with pytest.raises(TimeoutError): + with connect(sentiment_classification_demo.queue()) as client: + job = client.submit(api_name="/sleep") + job.result(timeout=0.05) + + @pytest.mark.flaky + def test_timeout_no_queue(self, sentiment_classification_demo): + with pytest.raises(TimeoutError): + with connect(sentiment_classification_demo) as client: + job = client.submit(api_name="/sleep") + job.result(timeout=0.1) + + def test_raises_exception(self, calculator_demo): + with pytest.raises(Exception): + with connect(calculator_demo) as client: + job = client.submit("foo", "add", 9, fn_index=0) + job.result() + + def test_raises_exception_no_queue(self, 
sentiment_classification_demo): + with pytest.raises(Exception): + with connect(sentiment_classification_demo) as client: + job = client.submit([5], api_name="/sleep") + job.result() + + @pytest.mark.flaky + def test_job_output_video(self): + client = Client(src="gradio/video_component") + job = client.submit( + "https://huggingface.co/spaces/gradio/video_component/resolve/main/files/a.mp4", + fn_index=0, + ) + assert Path(job.result()).exists() + assert Path(DEFAULT_TEMP_DIR).resolve() in Path(job.result()).resolve().parents + + temp_dir = tempfile.mkdtemp() + client = Client(src="gradio/video_component", output_dir=temp_dir) + job = client.submit( + "https://huggingface.co/spaces/gradio/video_component/resolve/main/files/a.mp4", + fn_index=0, + ) + assert Path(job.result()).exists() + assert Path(temp_dir).resolve() in Path(job.result()).resolve().parents + + def test_progress_updates(self, progress_demo): + with connect(progress_demo) as client: + job = client.submit("hello", api_name="/predict") + statuses = [] + while not job.done(): + statuses.append(job.status()) + time.sleep(0.02) + assert any(s.code == Status.PROGRESS for s in statuses) + assert any(s.progress_data is not None for s in statuses) + all_progress_data = [ + p for s in statuses if s.progress_data for p in s.progress_data + ] + count = 0 + for i in range(20): + unit = ProgressUnit( + index=i, length=20, unit="steps", progress=None, desc=None + ) + count += unit in all_progress_data + assert count + + def test_cancel_from_client_queued(self, cancel_from_client_demo): + with connect(cancel_from_client_demo) as client: + start = time.time() + job = client.submit(api_name="/long") + while not job.done(): + if job.status().code == Status.STARTING: + job.cancel() + break + with pytest.raises(CancelledError): + job.result() + # The whole prediction takes 10 seconds to run + # and does not iterate. 
So this tests that we can cancel + # halfway through a prediction + assert time.time() - start < 10 + assert job.status().code == Status.CANCELLED + + job = client.submit(api_name="/iterate") + iteration_count = 0 + while not job.done(): + if job.status().code == Status.ITERATING: + iteration_count += 1 + if iteration_count == 3: + job.cancel() + break + time.sleep(0.5) + # Result for iterative jobs will raise there is an exception + with pytest.raises(CancelledError): + job.result() + # The whole prediction takes 10 seconds to run + # and does not iterate. So this tests that we can cancel + # halfway through a prediction + assert time.time() - start < 10 + + # Test that we did not iterate all the way to the end + assert all(o in [0, 1, 2, 3, 4, 5] for o in job.outputs()) + assert job.status().code == Status.CANCELLED + + def test_cancel_subsequent_jobs_state_reset(self, yield_demo): + with connect(yield_demo) as client: + job1 = client.submit("abcdefefadsadfs") + time.sleep(3) + job1.cancel() + + assert len(job1.outputs()) < len("abcdefefadsadfs") + assert job1.status().code == Status.CANCELLED + + job2 = client.submit("abcd") + while not job2.done(): + time.sleep(0.1) + # Ran all iterations from scratch + assert job2.status().code == Status.FINISHED + assert len(job2.outputs()) == 4 + + def test_stream_audio(self, stream_audio): + with connect(stream_audio) as client: + job1 = client.submit( + "https://gradio-builds.s3.amazonaws.com/demo-files/bark_demo.mp4", + api_name="/predict", + ) + assert Path(job1.result()).exists() + + job2 = client.submit( + "https://gradio-builds.s3.amazonaws.com/demo-files/audio_sample.wav", + api_name="/predict", + ) + assert Path(job2.result()).exists() + assert all(Path(p).exists() for p in job2.outputs()) + + @pytest.mark.flaky + def test_upload_file_private_space(self): + client = Client( + src="gradio-tests/not-actually-private-file-upload", hf_token=HF_TOKEN + ) + + with patch.object( + client.endpoints[0], "_upload", 
wraps=client.endpoints[0]._upload + ) as upload: + with patch.object( + client.endpoints[0], "serialize", wraps=client.endpoints[0].serialize + ) as serialize: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + f.write("Hello from private space!") + + output = client.submit( + 1, "foo", f.name, api_name="/file_upload" + ).result() + with open(output) as f: + assert f.read() == "Hello from private space!" + upload.assert_called_once() + assert all(f["is_file"] for f in serialize.return_value()) + + with patch.object( + client.endpoints[1], "_upload", wraps=client.endpoints[0]._upload + ) as upload: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + f.write("Hello from private space!") + + with open(client.submit(f.name, api_name="/upload_btn").result()) as f: + assert f.read() == "Hello from private space!" + upload.assert_called_once() + + with patch.object( + client.endpoints[2], "_upload", wraps=client.endpoints[0]._upload + ) as upload: + # `delete=False` is required for Windows compat + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f1: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f2: + f1.write("File1") + f2.write("File2") + r1, r2 = client.submit( + 3, + [f1.name, f2.name], + "hello", + api_name="/upload_multiple", + ).result() + with open(r1) as f: + assert f.read() == "File1" + with open(r2) as f: + assert f.read() == "File2" + upload.assert_called_once() + + @pytest.mark.flaky + def test_upload_file_upload_route_does_not_exist(self): + client = Client( + src="gradio-tests/not-actually-private-file-upload-old-version", + hf_token=HF_TOKEN, + ) + + with patch.object( + client.endpoints[0], "serialize", wraps=client.endpoints[0].serialize + ) as serialize: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + f.write("Hello from private space!") + + client.submit(1, "foo", f.name, fn_index=0).result() + serialize.assert_called_once_with(1, "foo", f.name) + + def 
test_state_without_serialize(self, stateful_chatbot): + with connect(stateful_chatbot, serialize=False) as client: + initial_history = [["", None]] + message = "Hello" + ret = client.predict(message, initial_history, api_name="/submit") + assert ret == ("", [["", None], ["Hello", "I love you"]]) + + def test_can_call_mounted_app_via_api(self): + def greet(name): + return "Hello " + name + "!" + + gradio_app = gr.Interface( + fn=greet, + inputs=gr.Textbox(lines=2, placeholder="Name Here..."), + outputs="text", + ) + + app = FastAPI() + app = gr.mount_gradio_app(app, gradio_app, path="/test/gradio") + config = uvicorn.Config( + app=app, + port=8000, + log_level="info", + ) + server = Server(config=config) + # Using the gradio Server class to not have + # to implement code again to run uvicorn in a separate thread + # However, that means we need to set this flag to prevent + # run_in_thread_from_blocking + server.started = True + try: + server.run_in_thread() + time.sleep(1) + client = Client("http://127.0.0.1:8000/test/gradio/") + assert client.predict("freddy") == "Hello freddy!" + finally: + server.thread.join(timeout=1) + + def test_predict_with_space_with_api_name_false(self): + client = Client("gradio-tests/client-bool-api-name-error") + assert client.predict("Hello!", api_name="/run") == "Hello!" 
+ assert client.predict("Freddy", api_name="/say_hello") == "hello" + + def test_return_layout_component(self, hello_world_with_group): + with connect(hello_world_with_group) as demo: + assert demo.predict("Freddy", api_name="/greeting") == "Hello Freddy" + assert demo.predict(api_name="/show_group") == () + + def test_return_layout_and_state_components( + self, hello_world_with_state_and_accordion + ): + with connect(hello_world_with_state_and_accordion) as demo: + assert demo.predict("Freddy", api_name="/greeting") == ("Hello Freddy", 1) + assert demo.predict("Abubakar", api_name="/greeting") == ( + "Hello Abubakar", + 2, + ) + assert demo.predict(api_name="/open") == 3 + assert demo.predict(api_name="/close") == 4 + assert demo.predict("Ali", api_name="/greeting") == ("Hello Ali", 5) + + +class TestStatusUpdates: + @patch("gradio_client.client.Endpoint.make_end_to_end_fn") + def test_messages_passed_correctly(self, mock_make_end_to_end_fn): + now = datetime.now() + + messages = [ + StatusUpdate( + code=Status.STARTING, + eta=None, + rank=None, + success=None, + queue_size=None, + time=now, + progress_data=None, + ), + StatusUpdate( + code=Status.SENDING_DATA, + eta=None, + rank=None, + success=None, + queue_size=None, + time=now + timedelta(seconds=1), + progress_data=None, + ), + StatusUpdate( + code=Status.IN_QUEUE, + eta=3, + rank=2, + queue_size=2, + success=None, + time=now + timedelta(seconds=2), + progress_data=None, + ), + StatusUpdate( + code=Status.IN_QUEUE, + eta=2, + rank=1, + queue_size=1, + success=None, + time=now + timedelta(seconds=3), + progress_data=None, + ), + StatusUpdate( + code=Status.ITERATING, + eta=None, + rank=None, + queue_size=None, + success=None, + time=now + timedelta(seconds=3), + progress_data=None, + ), + StatusUpdate( + code=Status.FINISHED, + eta=None, + rank=None, + queue_size=None, + success=True, + time=now + timedelta(seconds=4), + progress_data=None, + ), + ] + + class MockEndToEndFunction: + def __init__(self, 
communicator: Communicator): + self.communicator = communicator + + def __call__(self, *args, **kwargs): + for m in messages: + with self.communicator.lock: + self.communicator.job.latest_status = m + time.sleep(0.1) + + mock_make_end_to_end_fn.side_effect = MockEndToEndFunction + + client = Client(src="gradio/calculator") + job = client.submit(5, "add", 6, api_name="/predict") + + statuses = [] + while not job.done(): + statuses.append(job.status()) + time.sleep(0.09) + + assert all(s in messages for s in statuses) + + @patch("gradio_client.client.Endpoint.make_end_to_end_fn") + def test_messages_correct_two_concurrent(self, mock_make_end_to_end_fn): + now = datetime.now() + + messages_1 = [ + StatusUpdate( + code=Status.STARTING, + eta=None, + rank=None, + success=None, + queue_size=None, + time=now, + progress_data=None, + ), + StatusUpdate( + code=Status.FINISHED, + eta=None, + rank=None, + queue_size=None, + success=True, + time=now + timedelta(seconds=4), + progress_data=None, + ), + ] + + messages_2 = [ + StatusUpdate( + code=Status.IN_QUEUE, + eta=3, + rank=2, + queue_size=2, + success=None, + time=now + timedelta(seconds=2), + progress_data=None, + ), + StatusUpdate( + code=Status.IN_QUEUE, + eta=2, + rank=1, + queue_size=1, + success=None, + time=now + timedelta(seconds=3), + progress_data=None, + ), + ] + + class MockEndToEndFunction: + n_counts = 0 + + def __init__(self, communicator: Communicator): + self.communicator = communicator + self.messages = ( + messages_1 if MockEndToEndFunction.n_counts == 0 else messages_2 + ) + MockEndToEndFunction.n_counts += 1 + + def __call__(self, *args, **kwargs): + for m in self.messages: + with self.communicator.lock: + print(f"here: {m}") + self.communicator.job.latest_status = m + time.sleep(0.1) + + mock_make_end_to_end_fn.side_effect = MockEndToEndFunction + + client = Client(src="gradio/calculator") + job_1 = client.submit(5, "add", 6, api_name="/predict") + job_2 = client.submit(11, "subtract", 1, 
api_name="/predict") + + statuses_1 = [] + statuses_2 = [] + while not (job_1.done() and job_2.done()): + statuses_1.append(job_1.status()) + statuses_2.append(job_2.status()) + time.sleep(0.05) + + assert all(s in messages_1 for s in statuses_1) + + +class TestAPIInfo: + @pytest.mark.parametrize("trailing_char", ["/", ""]) + def test_test_endpoint_src(self, trailing_char): + src = "https://gradio-calculator.hf.space" + trailing_char + client = Client(src=src) + assert client.endpoints[0].root_url == "https://gradio-calculator.hf.space/" + + @pytest.mark.flaky + def test_numerical_to_label_space(self): + client = Client("gradio-tests/titanic-survival") + assert client.view_api(return_format="dict") == { + "named_endpoints": { + "/predict": { + "parameters": [ + { + "label": "Sex", + "type": {"type": "string"}, + "python_type": {"type": "str", "description": ""}, + "component": "Radio", + "example_input": "Howdy!", + "serializer": "StringSerializable", + }, + { + "label": "Age", + "type": {"type": "number"}, + "python_type": { + "type": "int | float", + "description": "", + }, + "component": "Slider", + "example_input": 5, + "serializer": "NumberSerializable", + }, + { + "label": "Fare (british pounds)", + "type": {"type": "number"}, + "python_type": { + "type": "int | float", + "description": "", + }, + "component": "Slider", + "example_input": 5, + "serializer": "NumberSerializable", + }, + ], + "returns": [ + { + "label": "output", + "type": {"type": {}, "description": "any valid json"}, + "python_type": { + "type": "str", + "description": "filepath to JSON file", + }, + "component": "Label", + "serializer": "JSONSerializable", + } + ], + }, + "/predict_1": { + "parameters": [ + { + "label": "Sex", + "type": {"type": "string"}, + "python_type": {"type": "str", "description": ""}, + "component": "Radio", + "example_input": "Howdy!", + "serializer": "StringSerializable", + }, + { + "label": "Age", + "type": {"type": "number"}, + "python_type": { + "type": "int | 
float", + "description": "", + }, + "component": "Slider", + "example_input": 5, + "serializer": "NumberSerializable", + }, + { + "label": "Fare (british pounds)", + "type": {"type": "number"}, + "python_type": { + "type": "int | float", + "description": "", + }, + "component": "Slider", + "example_input": 5, + "serializer": "NumberSerializable", + }, + ], + "returns": [ + { + "label": "output", + "type": {"type": {}, "description": "any valid json"}, + "python_type": { + "type": "str", + "description": "filepath to JSON file", + }, + "component": "Label", + "serializer": "JSONSerializable", + } + ], + }, + "/predict_2": { + "parameters": [ + { + "label": "Sex", + "type": {"type": "string"}, + "python_type": {"type": "str", "description": ""}, + "component": "Radio", + "example_input": "Howdy!", + "serializer": "StringSerializable", + }, + { + "label": "Age", + "type": {"type": "number"}, + "python_type": { + "type": "int | float", + "description": "", + }, + "component": "Slider", + "example_input": 5, + "serializer": "NumberSerializable", + }, + { + "label": "Fare (british pounds)", + "type": {"type": "number"}, + "python_type": { + "type": "int | float", + "description": "", + }, + "component": "Slider", + "example_input": 5, + "serializer": "NumberSerializable", + }, + ], + "returns": [ + { + "label": "output", + "type": {"type": {}, "description": "any valid json"}, + "python_type": { + "type": "str", + "description": "filepath to JSON file", + }, + "component": "Label", + "serializer": "JSONSerializable", + } + ], + }, + }, + "unnamed_endpoints": {}, + } + + def test_serializable_in_mapping(self, calculator_demo): + with connect(calculator_demo) as client: + assert all( + isinstance(c, Serializable) for c in client.endpoints[0].serializers + ) + + def test_state_does_not_appear(self, state_demo): + with connect(state_demo) as client: + api_info = client.view_api(return_format="dict") + assert isinstance(api_info, dict) + for parameter in 
api_info["named_endpoints"]["/predict"]["parameters"]: + assert parameter["component"] != "State" + + @pytest.mark.flaky + def test_private_space(self): + client = Client("gradio-tests/not-actually-private-space", hf_token=HF_TOKEN) + assert len(client.endpoints) == 3 + assert len([e for e in client.endpoints if e.is_valid]) == 2 + assert len([e for e in client.endpoints if e.is_valid and e.api_name]) == 1 + assert client.view_api(return_format="dict") == { + "named_endpoints": { + "/predict": { + "parameters": [ + { + "label": "x", + "type": {"type": "string"}, + "python_type": {"type": "str", "description": ""}, + "component": "Textbox", + "example_input": "Howdy!", + "serializer": "StringSerializable", + } + ], + "returns": [ + { + "label": "output", + "type": {"type": "string"}, + "python_type": {"type": "str", "description": ""}, + "component": "Textbox", + "serializer": "StringSerializable", + } + ], + } + }, + "unnamed_endpoints": {}, + } + + @pytest.mark.flaky + def test_fetch_fixed_version_space(self): + assert Client("gradio-tests/calculator").view_api(return_format="dict") == { + "named_endpoints": { + "/predict": { + "parameters": [ + { + "label": "num1", + "type": {"type": "number"}, + "python_type": {"type": "int | float", "description": ""}, + "component": "Number", + "example_input": 5, + "serializer": "NumberSerializable", + }, + { + "label": "operation", + "type": {"type": "string"}, + "python_type": {"type": "str", "description": ""}, + "component": "Radio", + "example_input": "add", + "serializer": "StringSerializable", + }, + { + "label": "num2", + "type": {"type": "number"}, + "python_type": {"type": "int | float", "description": ""}, + "component": "Number", + "example_input": 5, + "serializer": "NumberSerializable", + }, + ], + "returns": [ + { + "label": "output", + "type": {"type": "number"}, + "python_type": {"type": "int | float", "description": ""}, + "component": "Number", + "serializer": "NumberSerializable", + } + ], + } + }, + 
"unnamed_endpoints": {}, + } + + def test_unnamed_endpoints_use_fn_index(self, count_generator_demo): + with connect(count_generator_demo) as client: + info = client.view_api(return_format="str") + assert "fn_index=0" in info + assert "api_name" not in info + + def test_api_false_endpoints_do_not_appear(self, count_generator_demo): + with connect(count_generator_demo) as client: + info = client.view_api(return_format="dict") + assert len(info["named_endpoints"]) == 0 + assert len(info["unnamed_endpoints"]) == 2 + + def test_api_false_endpoints_cannot_be_accessed_with_fn_index(self, increment_demo): + with connect(increment_demo) as client: + with pytest.raises(ValueError): + client.submit(1, fn_index=2) + + def test_file_io(self, file_io_demo): + with connect(file_io_demo) as client: + info = client.view_api(return_format="dict") + inputs = info["named_endpoints"]["/predict"]["parameters"] + outputs = info["named_endpoints"]["/predict"]["returns"] + + assert inputs[0]["type"]["type"] == "array" + assert inputs[0]["python_type"] == { + "type": "List[str]", + "description": "List of filepath(s) or URL(s) to files", + } + assert isinstance(inputs[0]["example_input"], list) + assert isinstance(inputs[0]["example_input"][0], str) + + assert inputs[1]["python_type"] == { + "type": "str", + "description": "filepath or URL to file", + } + assert isinstance(inputs[1]["example_input"], str) + + assert outputs[0]["python_type"] == { + "type": "List[str]", + "description": "List of filepath(s) or URL(s) to files", + } + assert outputs[0]["type"]["type"] == "array" + + assert outputs[1]["python_type"] == { + "type": "str", + "description": "filepath or URL to file", + } + + def test_layout_components_in_output(self, hello_world_with_group): + with connect(hello_world_with_group) as client: + info = client.view_api(return_format="dict") + assert info == { + "named_endpoints": { + "/greeting": { + "parameters": [ + { + "label": "name", + "type": {"type": "string"}, + 
"python_type": {"type": "str", "description": ""}, + "component": "Textbox", + "example_input": "Howdy!", + "serializer": "StringSerializable", + } + ], + "returns": [ + { + "label": "greeting", + "type": {"type": "string"}, + "python_type": {"type": "str", "description": ""}, + "component": "Textbox", + "serializer": "StringSerializable", + } + ], + }, + "/show_group": {"parameters": [], "returns": []}, + }, + "unnamed_endpoints": {}, + } + assert info["named_endpoints"]["/show_group"] == { + "parameters": [], + "returns": [], + } + + def test_layout_and_state_components_in_output( + self, hello_world_with_state_and_accordion + ): + with connect(hello_world_with_state_and_accordion) as client: + info = client.view_api(return_format="dict") + assert info == { + "named_endpoints": { + "/greeting": { + "parameters": [ + { + "label": "name", + "type": {"type": "string"}, + "python_type": {"type": "str", "description": ""}, + "component": "Textbox", + "example_input": "Howdy!", + "serializer": "StringSerializable", + } + ], + "returns": [ + { + "label": "greeting", + "type": {"type": "string"}, + "python_type": {"type": "str", "description": ""}, + "component": "Textbox", + "serializer": "StringSerializable", + }, + { + "label": "count", + "type": {"type": "number"}, + "python_type": { + "type": "int | float", + "description": "", + }, + "component": "Number", + "serializer": "NumberSerializable", + }, + ], + }, + "/open": { + "parameters": [], + "returns": [ + { + "label": "count", + "type": {"type": "number"}, + "python_type": { + "type": "int | float", + "description": "", + }, + "component": "Number", + "serializer": "NumberSerializable", + } + ], + }, + "/close": { + "parameters": [], + "returns": [ + { + "label": "count", + "type": {"type": "number"}, + "python_type": { + "type": "int | float", + "description": "", + }, + "component": "Number", + "serializer": "NumberSerializable", + } + ], + }, + }, + "unnamed_endpoints": {}, + } + + +class TestEndpoints: + def 
test_upload(self): + client = Client( + src="gradio-tests/not-actually-private-file-upload", hf_token=HF_TOKEN + ) + response = MagicMock(status_code=200) + response.json.return_value = [ + "file1", + "file2", + "file3", + "file4", + "file5", + "file6", + "file7", + ] + with patch("requests.post", MagicMock(return_value=response)): + with patch("builtins.open", MagicMock()): + with patch.object(pathlib.Path, "name") as mock_name: + mock_name.side_effect = lambda x: x + results = client.endpoints[0]._upload( + ["pre1", ["pre2", "pre3", "pre4"], ["pre5", "pre6"], "pre7"] + ) + + res = [] + for re in results: + if isinstance(re, list): + res.append([r["name"] for r in re]) + else: + res.append(re["name"]) + + assert res == [ + "file1", + ["file2", "file3", "file4"], + ["file5", "file6"], + "file7", + ] + + +cpu = huggingface_hub.SpaceHardware.CPU_BASIC + + +class TestDuplication: + @pytest.mark.flaky + @patch("huggingface_hub.get_space_runtime", return_value=MagicMock(hardware=cpu)) + @patch("gradio_client.client.Client.__init__", return_value=None) + def test_new_space_id(self, mock_init, mock_runtime): + Client.duplicate("gradio/calculator", "test", hf_token=HF_TOKEN) + mock_runtime.assert_any_call("gradio/calculator", token=HF_TOKEN) + mock_runtime.assert_any_call("gradio-tests/test", token=HF_TOKEN) + mock_init.assert_called_with( + "gradio-tests/test", hf_token=HF_TOKEN, max_workers=40, verbose=True + ) + Client.duplicate("gradio/calculator", "gradio-tests/test", hf_token=HF_TOKEN) + mock_runtime.assert_any_call("gradio/calculator", token=HF_TOKEN) + mock_runtime.assert_any_call("gradio-tests/test", token=HF_TOKEN) + mock_init.assert_called_with( + "gradio-tests/test", hf_token=HF_TOKEN, max_workers=40, verbose=True + ) + + @pytest.mark.flaky + @patch("gradio_client.utils.set_space_timeout") + @patch("huggingface_hub.get_space_runtime", return_value=MagicMock(hardware=cpu)) + @patch("gradio_client.client.Client.__init__", return_value=None) + def 
test_dont_set_timeout_if_default_hardware( + self, mock_init, mock_runtime, mock_set_timeout + ): + Client.duplicate("gradio/calculator", "test", hf_token=HF_TOKEN) + mock_set_timeout.assert_not_called() + + @pytest.mark.flaky + @patch("huggingface_hub.request_space_hardware") + @patch("gradio_client.utils.set_space_timeout") + @patch( + "huggingface_hub.get_space_runtime", + return_value=MagicMock(hardware=huggingface_hub.SpaceHardware.CPU_UPGRADE), + ) + @patch("gradio_client.client.Client.__init__", return_value=None) + def test_set_timeout_if_not_default_hardware( + self, mock_init, mock_runtime, mock_set_timeout, mock_request_hardware + ): + Client.duplicate( + "gradio/calculator", + "test", + hf_token=HF_TOKEN, + hardware="cpu-upgrade", + sleep_timeout=15, + ) + mock_set_timeout.assert_called_once_with( + "gradio-tests/test", hf_token=HF_TOKEN, timeout_in_seconds=15 * 60 + ) + + @pytest.mark.flaky + @patch("huggingface_hub.get_space_runtime", return_value=MagicMock(hardware=cpu)) + @patch("gradio_client.client.Client.__init__", return_value=None) + def test_default_space_id(self, mock_init, mock_runtime): + Client.duplicate("gradio/calculator", hf_token=HF_TOKEN) + mock_runtime.assert_any_call("gradio/calculator", token=HF_TOKEN) + mock_runtime.assert_any_call("gradio-tests/calculator", token=HF_TOKEN) + mock_init.assert_called_with( + "gradio-tests/calculator", hf_token=HF_TOKEN, max_workers=40, verbose=True + ) + + @pytest.mark.flaky + @patch("huggingface_hub.add_space_secret") + @patch("huggingface_hub.duplicate_space") + @patch("gradio_client.client.Client.__init__", return_value=None) + @patch("gradio_client.utils.set_space_timeout") + def test_add_secrets(self, mock_time, mock_init, mock_duplicate, mock_add_secret): + with pytest.raises(RepositoryNotFoundError): + name = str(uuid.uuid4()) + Client.duplicate( + "gradio/calculator", + name, + hf_token=HF_TOKEN, + secrets={"test_key": "test_value", "test_key2": "test_value2"}, + ) + 
mock_add_secret.assert_called_with( + f"gradio-tests/{name}", + "test_key", + "test_value", + token=HF_TOKEN, + ) + mock_add_secret.assert_any_call( + f"gradio-tests/{name}", + "test_key2", + "test_value2", + token=HF_TOKEN, + ) diff --git a/testbed/gradio-app__gradio/client/python/test/test_documentation.py b/testbed/gradio-app__gradio/client/python/test/test_documentation.py new file mode 100644 index 0000000000000000000000000000000000000000..d73a9e159b5b9904c38cafa5311fbf8fff3de395 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/test/test_documentation.py @@ -0,0 +1,7 @@ +from gradio_client import documentation + + +class TestDocumentation: + def test_website_documentation(self): + docs = documentation.generate_documentation() + assert len(docs) > 0 diff --git a/testbed/gradio-app__gradio/client/python/test/test_serializing.py b/testbed/gradio-app__gradio/client/python/test/test_serializing.py new file mode 100644 index 0000000000000000000000000000000000000000..8bf4fb104498878eb43bd8e8b73904fdc7ed1eb3 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/test/test_serializing.py @@ -0,0 +1,60 @@ +import os +import tempfile + +import pytest +from gradio import components + +from gradio_client.serializing import COMPONENT_MAPPING, FileSerializable, Serializable +from gradio_client.utils import SKIP_COMPONENTS, encode_url_or_file_to_base64 + + +@pytest.mark.parametrize("serializer_class", Serializable.__subclasses__()) +def test_duplicate(serializer_class): + if "gradio_client" not in serializer_class.__module__: + pytest.skip(f"{serializer_class} not defined in gradio_client") + serializer = serializer_class() + info = serializer.api_info() + assert "info" in info and "serialized_info" in info + if "serialized_info" in info: + assert serializer.serialized_info() + + +def test_check_component_fallback_serializers(): + for component_name, class_type in COMPONENT_MAPPING.items(): + # skip components that cannot be instantiated without parameters + 
if component_name in SKIP_COMPONENTS: + continue + component = components.get_component_instance(component_name) + assert isinstance(component, class_type) + + +def test_all_components_in_component_mapping(all_components): + for component in all_components: + assert component.__name__.lower() in COMPONENT_MAPPING + + +def test_file_serializing(): + try: + serializing = FileSerializable() + with tempfile.NamedTemporaryFile(delete=False, mode="w") as f1: + with tempfile.NamedTemporaryFile(delete=False, mode="w") as f2: + f1.write("Hello World!") + f2.write("Greetings!") + + output = serializing.serialize(f1.name) + assert output["data"] == encode_url_or_file_to_base64(f1.name) + output = serializing.serialize([f1.name, f2.name]) + assert output[0]["data"] == encode_url_or_file_to_base64(f1.name) + assert output[1]["data"] == encode_url_or_file_to_base64(f2.name) + + # no-op for dict + assert serializing.serialize(output) == output + + files = serializing.deserialize(output) + with open(files[0]) as f: + assert f.read() == "Hello World!" + with open(files[1]) as f: + assert f.read() == "Greetings!" 
+ finally: + os.remove(f1.name) + os.remove(f2.name) diff --git a/testbed/gradio-app__gradio/client/python/test/test_utils.py b/testbed/gradio-app__gradio/client/python/test/test_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..21ceb0c12f05dfc75d83d03ba7726fbb3dbc2891 --- /dev/null +++ b/testbed/gradio-app__gradio/client/python/test/test_utils.py @@ -0,0 +1,180 @@ +import importlib.resources +import json +import tempfile +from copy import deepcopy +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest +import requests + +from gradio_client import media_data, utils + +types = json.loads(importlib.resources.read_text("gradio_client", "types.json")) +types["MultipleFile"] = { + "type": "array", + "items": {"type": "string", "description": "filepath or URL to file"}, +} +types["SingleFile"] = {"type": "string", "description": "filepath or URL to file"} + + +def test_encode_url_or_file_to_base64(): + output_base64 = utils.encode_url_or_file_to_base64( + Path(__file__).parent / "../../../gradio/test_data/test_image.png" + ) + assert output_base64 == deepcopy(media_data.BASE64_IMAGE) + + +def test_encode_file_to_base64(): + output_base64 = utils.encode_file_to_base64( + Path(__file__).parent / "../../../gradio/test_data/test_image.png" + ) + assert output_base64 == deepcopy(media_data.BASE64_IMAGE) + + +@pytest.mark.flaky +def test_encode_url_to_base64(): + output_base64 = utils.encode_url_to_base64( + "https://raw.githubusercontent.com/gradio-app/gradio/main/gradio/test_data/test_image.png" + ) + assert output_base64 == deepcopy(media_data.BASE64_IMAGE) + + +def test_encode_url_to_base64_doesnt_encode_errors(monkeypatch): + error_response = requests.Response() + error_response.status_code = 404 + monkeypatch.setattr(requests, "get", lambda *args, **kwargs: error_response) + with pytest.raises(requests.RequestException): + utils.encode_url_to_base64("https://example.com/foo") + + +def 
test_decode_base64_to_binary(): + binary = utils.decode_base64_to_binary(deepcopy(media_data.BASE64_IMAGE)) + assert deepcopy(media_data.BINARY_IMAGE) == binary + + b64_img_without_header = deepcopy(media_data.BASE64_IMAGE).split(",")[1] + binary_without_header, extension = utils.decode_base64_to_binary( + b64_img_without_header + ) + + assert binary[0] == binary_without_header + assert extension is None + + +def test_decode_base64_to_file(): + temp_file = utils.decode_base64_to_file(deepcopy(media_data.BASE64_IMAGE)) + assert isinstance(temp_file, tempfile._TemporaryFileWrapper) + + +def test_download_private_file(): + url_path = "https://gradio-tests-not-actually-private-space.hf.space/file=lion.jpg" + hf_token = "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" # Intentionally revealing this key for testing purposes + file = utils.download_tmp_copy_of_file(url_path=url_path, hf_token=hf_token) + assert Path(file).name.endswith(".jpg") + + +def test_download_tmp_copy_of_file_does_not_save_errors(monkeypatch): + error_response = requests.Response() + error_response.status_code = 404 + error_response.close = lambda: 0 # Mock close method to avoid unrelated exception + monkeypatch.setattr(requests, "get", lambda *args, **kwargs: error_response) + with pytest.raises(requests.RequestException): + utils.download_tmp_copy_of_file("https://example.com/foo") + + +@pytest.mark.parametrize( + "orig_filename, new_filename", + [ + ("abc", "abc"), + ("$$AAabc&3", "AAabc3"), + ("$$AAabc&3", "AAabc3"), + ("$$AAa..b-c&3_", "AAa..b-c3_"), + ("$$AAa..b-c&3_", "AAa..b-c3_"), + ( + "ゆかりです。私、こんなかわいい服は初めて着ました…。なんだかうれしくって、楽しいです。歌いたくなる気分って、初めてです。これがアイドルってことなのかもしれませんね", + "ゆかりです私こんなかわいい服は初めて着ましたなんだかうれしくって楽しいです歌いたくなる気分って初めてですこれがアイドルってことなの", + ), + ], +) +def test_strip_invalid_filename_characters(orig_filename, new_filename): + assert utils.strip_invalid_filename_characters(orig_filename) == new_filename + + +class AsyncMock(MagicMock): + async def __call__(self, *args, **kwargs): + return 
super(AsyncMock, self).__call__(*args, **kwargs) + + +@pytest.mark.asyncio +async def test_get_pred_from_ws(): + mock_ws = AsyncMock(name="ws") + messages = [ + json.dumps({"msg": "estimation"}), + json.dumps({"msg": "send_data"}), + json.dumps({"msg": "process_generating"}), + json.dumps({"msg": "process_completed", "output": {"data": ["result!"]}}), + ] + mock_ws.recv.side_effect = messages + data = json.dumps({"data": ["foo"], "fn_index": "foo"}) + hash_data = json.dumps({"session_hash": "daslskdf", "fn_index": "foo"}) + output = await utils.get_pred_from_ws(mock_ws, data, hash_data) + assert output == {"data": ["result!"]} + mock_ws.send.assert_called_once_with(data) + + +@pytest.mark.asyncio +async def test_get_pred_from_ws_raises_if_queue_full(): + mock_ws = AsyncMock(name="ws") + messages = [json.dumps({"msg": "queue_full"})] + mock_ws.recv.side_effect = messages + data = json.dumps({"data": ["foo"], "fn_index": "foo"}) + hash_data = json.dumps({"session_hash": "daslskdf", "fn_index": "foo"}) + with pytest.raises(utils.QueueError, match="Queue is full!"): + await utils.get_pred_from_ws(mock_ws, data, hash_data) + + +@patch("requests.post") +def test_sleep_successful(mock_post): + utils.set_space_timeout("gradio/calculator") + + +@patch( + "requests.post", + return_value=MagicMock(raise_for_status=MagicMock(side_effect=requests.HTTPError)), +) +def test_sleep_unsuccessful(mock_post): + with pytest.raises(utils.SpaceDuplicationError): + utils.set_space_timeout("gradio/calculator") + + +@pytest.mark.parametrize("schema", types) +def test_json_schema_to_python_type(schema): + if schema == "SimpleSerializable": + answer = "Any" + elif schema == "StringSerializable": + answer = "str" + elif schema == "ListStringSerializable": + answer = "List[str]" + elif schema == "BooleanSerializable": + answer = "bool" + elif schema == "NumberSerializable": + answer = "int | float" + elif schema == "ImgSerializable": + answer = "str" + elif schema == "FileSerializable": + 
answer = "str | Dict(name: str (name of file), data: str (base64 representation of file), size: int (size of image in bytes), is_file: bool (true if the file has been uploaded to the server), orig_name: str (original name of the file)) | List[str | Dict(name: str (name of file), data: str (base64 representation of file), size: int (size of image in bytes), is_file: bool (true if the file has been uploaded to the server), orig_name: str (original name of the file))]" + elif schema == "JSONSerializable": + answer = "Dict[Any, Any]" + elif schema == "GallerySerializable": + answer = "Tuple[Dict(name: str (name of file), data: str (base64 representation of file), size: int (size of image in bytes), is_file: bool (true if the file has been uploaded to the server), orig_name: str (original name of the file)), str | None]" + elif schema == "SingleFileSerializable": + answer = "str | Dict(name: str (name of file), data: str (base64 representation of file), size: int (size of image in bytes), is_file: bool (true if the file has been uploaded to the server), orig_name: str (original name of the file))" + elif schema == "MultipleFileSerializable": + answer = "List[str | Dict(name: str (name of file), data: str (base64 representation of file), size: int (size of image in bytes), is_file: bool (true if the file has been uploaded to the server), orig_name: str (original name of the file))]" + elif schema == "SingleFile": + answer = "str" + elif schema == "MultipleFile": + answer = "List[str]" + else: + raise ValueError(f"This test has not been modified to check {schema}") + assert utils.json_schema_to_python_type(types[schema]) == answer diff --git a/testbed/gradio-app__gradio/demo/__init__.py b/testbed/gradio-app__gradio/demo/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/gradio-app__gradio/demo/altair_plot/requirements.txt 
b/testbed/gradio-app__gradio/demo/altair_plot/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a45a80196201251d44186498ef2501d6095ca50 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/altair_plot/requirements.txt @@ -0,0 +1,2 @@ +altair +vega_datasets \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/altair_plot/run.ipynb b/testbed/gradio-app__gradio/demo/altair_plot/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..d98c209bd0267f194ef8982ec96b8a37d500e82d --- /dev/null +++ b/testbed/gradio-app__gradio/demo/altair_plot/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: altair_plot"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio altair vega_datasets"]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import altair as alt\n", "import gradio as gr\n", "import numpy as np\n", "import pandas as pd\n", "from vega_datasets import data\n", "\n", "\n", "def make_plot(plot_type):\n", " if plot_type == \"scatter_plot\":\n", " cars = data.cars()\n", " return alt.Chart(cars).mark_point().encode(\n", " x='Horsepower',\n", " y='Miles_per_Gallon',\n", " color='Origin',\n", " )\n", " elif plot_type == \"heatmap\":\n", " # Compute x^2 + y^2 across a 2D grid\n", " x, y = np.meshgrid(range(-5, 5), range(-5, 5))\n", " z = x ** 2 + y ** 2\n", "\n", " # Convert this grid to columnar data expected by Altair\n", " source = pd.DataFrame({'x': x.ravel(),\n", " 'y': y.ravel(),\n", " 'z': z.ravel()})\n", " return alt.Chart(source).mark_rect().encode(\n", " x='x:O',\n", " y='y:O',\n", " color='z:Q'\n", " )\n", " elif plot_type == \"us_map\":\n", " states = alt.topo_feature(data.us_10m.url, 'states')\n", 
" source = data.income.url\n", "\n", " return alt.Chart(source).mark_geoshape().encode(\n", " shape='geo:G',\n", " color='pct:Q',\n", " tooltip=['name:N', 'pct:Q'],\n", " facet=alt.Facet('group:N', columns=2),\n", " ).transform_lookup(\n", " lookup='id',\n", " from_=alt.LookupData(data=states, key='id'),\n", " as_='geo'\n", " ).properties(\n", " width=300,\n", " height=175,\n", " ).project(\n", " type='albersUsa'\n", " )\n", " elif plot_type == \"interactive_barplot\":\n", " source = data.movies.url\n", "\n", " pts = alt.selection(type=\"single\", encodings=['x'])\n", "\n", " rect = alt.Chart(data.movies.url).mark_rect().encode(\n", " alt.X('IMDB_Rating:Q', bin=True),\n", " alt.Y('Rotten_Tomatoes_Rating:Q', bin=True),\n", " alt.Color('count()',\n", " scale=alt.Scale(scheme='greenblue'),\n", " legend=alt.Legend(title='Total Records')\n", " )\n", " )\n", "\n", " circ = rect.mark_point().encode(\n", " alt.ColorValue('grey'),\n", " alt.Size('count()',\n", " legend=alt.Legend(title='Records in Selection')\n", " )\n", " ).transform_filter(\n", " pts\n", " )\n", "\n", " bar = alt.Chart(source).mark_bar().encode(\n", " x='Major_Genre:N',\n", " y='count()',\n", " color=alt.condition(pts, alt.ColorValue(\"steelblue\"), alt.ColorValue(\"grey\"))\n", " ).properties(\n", " width=550,\n", " height=200\n", " ).add_selection(pts)\n", "\n", " plot = alt.vconcat(\n", " rect + circ,\n", " bar\n", " ).resolve_legend(\n", " color=\"independent\",\n", " size=\"independent\"\n", " )\n", " return plot\n", " elif plot_type == \"radial\":\n", " source = pd.DataFrame({\"values\": [12, 23, 47, 6, 52, 19]})\n", "\n", " base = alt.Chart(source).encode(\n", " theta=alt.Theta(\"values:Q\", stack=True),\n", " radius=alt.Radius(\"values\", scale=alt.Scale(type=\"sqrt\", zero=True, rangeMin=20)),\n", " color=\"values:N\",\n", " )\n", "\n", " c1 = base.mark_arc(innerRadius=20, stroke=\"#fff\")\n", "\n", " c2 = base.mark_text(radiusOffset=10).encode(text=\"values:Q\")\n", "\n", " return c1 + c2\n", " 
elif plot_type == \"multiline\":\n", " source = data.stocks()\n", "\n", " highlight = alt.selection(type='single', on='mouseover',\n", " fields=['symbol'], nearest=True)\n", "\n", " base = alt.Chart(source).encode(\n", " x='date:T',\n", " y='price:Q',\n", " color='symbol:N'\n", " )\n", "\n", " points = base.mark_circle().encode(\n", " opacity=alt.value(0)\n", " ).add_selection(\n", " highlight\n", " ).properties(\n", " width=600\n", " )\n", "\n", " lines = base.mark_line().encode(\n", " size=alt.condition(~highlight, alt.value(1), alt.value(3))\n", " )\n", "\n", " return points + lines\n", "\n", "\n", "with gr.Blocks() as demo:\n", " button = gr.Radio(label=\"Plot type\",\n", " choices=['scatter_plot', 'heatmap', 'us_map',\n", " 'interactive_barplot', \"radial\", \"multiline\"], value='scatter_plot')\n", " plot = gr.Plot(label=\"Plot\")\n", " button.change(make_plot, inputs=button, outputs=[plot])\n", " demo.load(make_plot, inputs=[button], outputs=[plot])\n", "\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/altair_plot/run.py b/testbed/gradio-app__gradio/demo/altair_plot/run.py new file mode 100644 index 0000000000000000000000000000000000000000..2fd137fae7f6184d2cee3ee96b61464cc0c5c5c3 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/altair_plot/run.py @@ -0,0 +1,140 @@ +import altair as alt +import gradio as gr +import numpy as np +import pandas as pd +from vega_datasets import data + + +def make_plot(plot_type): + if plot_type == "scatter_plot": + cars = data.cars() + return alt.Chart(cars).mark_point().encode( + x='Horsepower', + y='Miles_per_Gallon', + color='Origin', + ) + elif plot_type == "heatmap": + # Compute x^2 + y^2 across a 2D grid + x, y = np.meshgrid(range(-5, 5), range(-5, 5)) + z = x ** 2 + y ** 2 + + # Convert this grid to columnar data expected by Altair + source = pd.DataFrame({'x': x.ravel(), + 
'y': y.ravel(), + 'z': z.ravel()}) + return alt.Chart(source).mark_rect().encode( + x='x:O', + y='y:O', + color='z:Q' + ) + elif plot_type == "us_map": + states = alt.topo_feature(data.us_10m.url, 'states') + source = data.income.url + + return alt.Chart(source).mark_geoshape().encode( + shape='geo:G', + color='pct:Q', + tooltip=['name:N', 'pct:Q'], + facet=alt.Facet('group:N', columns=2), + ).transform_lookup( + lookup='id', + from_=alt.LookupData(data=states, key='id'), + as_='geo' + ).properties( + width=300, + height=175, + ).project( + type='albersUsa' + ) + elif plot_type == "interactive_barplot": + source = data.movies.url + + pts = alt.selection(type="single", encodings=['x']) + + rect = alt.Chart(data.movies.url).mark_rect().encode( + alt.X('IMDB_Rating:Q', bin=True), + alt.Y('Rotten_Tomatoes_Rating:Q', bin=True), + alt.Color('count()', + scale=alt.Scale(scheme='greenblue'), + legend=alt.Legend(title='Total Records') + ) + ) + + circ = rect.mark_point().encode( + alt.ColorValue('grey'), + alt.Size('count()', + legend=alt.Legend(title='Records in Selection') + ) + ).transform_filter( + pts + ) + + bar = alt.Chart(source).mark_bar().encode( + x='Major_Genre:N', + y='count()', + color=alt.condition(pts, alt.ColorValue("steelblue"), alt.ColorValue("grey")) + ).properties( + width=550, + height=200 + ).add_selection(pts) + + plot = alt.vconcat( + rect + circ, + bar + ).resolve_legend( + color="independent", + size="independent" + ) + return plot + elif plot_type == "radial": + source = pd.DataFrame({"values": [12, 23, 47, 6, 52, 19]}) + + base = alt.Chart(source).encode( + theta=alt.Theta("values:Q", stack=True), + radius=alt.Radius("values", scale=alt.Scale(type="sqrt", zero=True, rangeMin=20)), + color="values:N", + ) + + c1 = base.mark_arc(innerRadius=20, stroke="#fff") + + c2 = base.mark_text(radiusOffset=10).encode(text="values:Q") + + return c1 + c2 + elif plot_type == "multiline": + source = data.stocks() + + highlight = alt.selection(type='single', 
on='mouseover', + fields=['symbol'], nearest=True) + + base = alt.Chart(source).encode( + x='date:T', + y='price:Q', + color='symbol:N' + ) + + points = base.mark_circle().encode( + opacity=alt.value(0) + ).add_selection( + highlight + ).properties( + width=600 + ) + + lines = base.mark_line().encode( + size=alt.condition(~highlight, alt.value(1), alt.value(3)) + ) + + return points + lines + + +with gr.Blocks() as demo: + button = gr.Radio(label="Plot type", + choices=['scatter_plot', 'heatmap', 'us_map', + 'interactive_barplot', "radial", "multiline"], value='scatter_plot') + plot = gr.Plot(label="Plot") + button.change(make_plot, inputs=button, outputs=[plot]) + demo.load(make_plot, inputs=[button], outputs=[plot]) + + +if __name__ == "__main__": + demo.launch() diff --git a/testbed/gradio-app__gradio/demo/blocks_component_shortcut/run.ipynb b/testbed/gradio-app__gradio/demo/blocks_component_shortcut/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..95c99c765e5add62f8ab3c0636fe3bfc702696d3 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/blocks_component_shortcut/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: blocks_component_shortcut"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "\n", "def greet(str):\n", " return str\n", "\n", "\n", "with gr.Blocks() as demo:\n", " \"\"\"\n", " You can make use of str shortcuts you use in Interface within Blocks as well.\n", " \n", " Interface shortcut example:\n", " Interface(greet, \"textarea\", \"textarea\")\n", " \n", " You can use \n", " 1. gr.component()\n", " 2. gr.templates.Template()\n", " 3. 
gr.Template()\n", " All the templates are listed in gradio/templates.py\n", " \"\"\"\n", " with gr.Row():\n", " text1 = gr.component(\"textarea\")\n", " text2 = gr.TextArea()\n", " text3 = gr.templates.TextArea()\n", " text1.blur(greet, text1, text2)\n", " text2.blur(greet, text2, text3)\n", " text3.blur(greet, text3, text1)\n", " button = gr.component(\"button\")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/blocks_component_shortcut/run.py b/testbed/gradio-app__gradio/demo/blocks_component_shortcut/run.py new file mode 100644 index 0000000000000000000000000000000000000000..6e0b2f6a33001f69eecb813d327b7e1d9e804d39 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/blocks_component_shortcut/run.py @@ -0,0 +1,31 @@ +import gradio as gr + + +def greet(str): + return str + + +with gr.Blocks() as demo: + """ + You can make use of str shortcuts you use in Interface within Blocks as well. + + Interface shortcut example: + Interface(greet, "textarea", "textarea") + + You can use + 1. gr.component() + 2. gr.templates.Template() + 3. 
gr.Template() + All the templates are listed in gradio/templates.py + """ + with gr.Row(): + text1 = gr.component("textarea") + text2 = gr.TextArea() + text3 = gr.templates.TextArea() + text1.blur(greet, text1, text2) + text2.blur(greet, text2, text3) + text3.blur(greet, text3, text1) + button = gr.component("button") + +if __name__ == "__main__": + demo.launch() diff --git a/testbed/gradio-app__gradio/demo/blocks_interpretation/run.ipynb b/testbed/gradio-app__gradio/demo/blocks_interpretation/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..da7d3388b667e2eedb93c7fd7cc599798cbebd4f --- /dev/null +++ b/testbed/gradio-app__gradio/demo/blocks_interpretation/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: blocks_interpretation"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio shap matplotlib transformers torch"]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import shap\n", "from transformers import pipeline\n", "import matplotlib.pyplot as plt\n", "\n", "\n", "sentiment_classifier = pipeline(\"text-classification\", return_all_scores=True)\n", "\n", "\n", "def classifier(text):\n", " pred = sentiment_classifier(text)\n", " return {p[\"label\"]: p[\"score\"] for p in pred[0]}\n", "\n", "\n", "def interpretation_function(text):\n", " explainer = shap.Explainer(sentiment_classifier)\n", " shap_values = explainer([text])\n", " # Dimensions are (batch size, text size, number of classes)\n", " # Since we care about positive sentiment, use index 1\n", " scores = list(zip(shap_values.data[0], shap_values.values[0, :, 1]))\n", "\n", " scores_desc = sorted(scores, key=lambda t: t[1])[::-1]\n", "\n", " # Filter out empty 
string added by shap\n", " scores_desc = [t for t in scores_desc if t[0] != \"\"]\n", "\n", " fig_m = plt.figure()\n", " plt.bar(x=[s[0] for s in scores_desc[:5]],\n", " height=[s[1] for s in scores_desc[:5]])\n", " plt.title(\"Top words contributing to positive sentiment\")\n", " plt.ylabel(\"Shap Value\")\n", " plt.xlabel(\"Word\")\n", " return {\"original\": text, \"interpretation\": scores}, fig_m\n", "\n", "\n", "with gr.Blocks() as demo:\n", " with gr.Row():\n", " with gr.Column():\n", " input_text = gr.Textbox(label=\"Input Text\")\n", " with gr.Row():\n", " classify = gr.Button(\"Classify Sentiment\")\n", " interpret = gr.Button(\"Interpret\")\n", " with gr.Column():\n", " label = gr.Label(label=\"Predicted Sentiment\")\n", " with gr.Column():\n", " with gr.Tab(\"Display interpretation with built-in component\"):\n", " interpretation = gr.components.Interpretation(input_text)\n", " with gr.Tab(\"Display interpretation with plot\"):\n", " interpretation_plot = gr.Plot()\n", "\n", " classify.click(classifier, input_text, label)\n", " interpret.click(interpretation_function, input_text, [interpretation, interpretation_plot])\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/blocks_interpretation/run.py b/testbed/gradio-app__gradio/demo/blocks_interpretation/run.py new file mode 100644 index 0000000000000000000000000000000000000000..467b4474cb68d3f48c0459cdbaaa9d40f7616eb9 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/blocks_interpretation/run.py @@ -0,0 +1,55 @@ +import gradio as gr +import shap +from transformers import pipeline +import matplotlib.pyplot as plt + + +sentiment_classifier = pipeline("text-classification", return_all_scores=True) + + +def classifier(text): + pred = sentiment_classifier(text) + return {p["label"]: p["score"] for p in pred[0]} + + +def interpretation_function(text): + explainer = 
shap.Explainer(sentiment_classifier) + shap_values = explainer([text]) + # Dimensions are (batch size, text size, number of classes) + # Since we care about positive sentiment, use index 1 + scores = list(zip(shap_values.data[0], shap_values.values[0, :, 1])) + + scores_desc = sorted(scores, key=lambda t: t[1])[::-1] + + # Filter out empty string added by shap + scores_desc = [t for t in scores_desc if t[0] != ""] + + fig_m = plt.figure() + plt.bar(x=[s[0] for s in scores_desc[:5]], + height=[s[1] for s in scores_desc[:5]]) + plt.title("Top words contributing to positive sentiment") + plt.ylabel("Shap Value") + plt.xlabel("Word") + return {"original": text, "interpretation": scores}, fig_m + + +with gr.Blocks() as demo: + with gr.Row(): + with gr.Column(): + input_text = gr.Textbox(label="Input Text") + with gr.Row(): + classify = gr.Button("Classify Sentiment") + interpret = gr.Button("Interpret") + with gr.Column(): + label = gr.Label(label="Predicted Sentiment") + with gr.Column(): + with gr.Tab("Display interpretation with built-in component"): + interpretation = gr.components.Interpretation(input_text) + with gr.Tab("Display interpretation with plot"): + interpretation_plot = gr.Plot() + + classify.click(classifier, input_text, label) + interpret.click(interpretation_function, input_text, [interpretation, interpretation_plot]) + +if __name__ == "__main__": + demo.launch() \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/blocks_kitchen_sink/run.ipynb b/testbed/gradio-app__gradio/demo/blocks_kitchen_sink/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..10128f8ff735fb597d39b2fd7cd3bb522ad9414f --- /dev/null +++ b/testbed/gradio-app__gradio/demo/blocks_kitchen_sink/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: blocks_kitchen_sink"]}, {"cell_type": "code", "execution_count": null, "id": 
272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import time\n", "from os.path import abspath, join, pardir\n", "\n", "KS_FILES = abspath(join(__file__, pardir, pardir, \"kitchen_sink\", \"files\"))\n", "\n", "base_theme = gr.themes.Base()\n", "default_theme = gr.themes.Default()\n", "monochrome_theme = gr.themes.Monochrome()\n", "soft_theme = gr.themes.Soft()\n", "glass_theme = gr.themes.Glass()\n", "\n", "with gr.Blocks(theme=base_theme) as demo:\n", " gr.Markdown(\n", " \"\"\"\n", " # Blocks Kitchen Sink\n", " This is a demo of most Gradio features. Test all themes and toggle dark mode\n", " ## Elements\n", " - Use of Rows, Columns, Tabs, and Accordion\n", " - Use of Form elements: Textbox, Dropdown, Checkbox, Radio, Slider\n", " ## Other\n", " Other stuff\n", " - Buttons of variants: \"primary\", \"secondary\", \"stop\"\n", " - Embedded interface\n", " - Custom progress bar\n", " \"\"\"\n", " )\n", " toggle_dark = gr.Button(\"Toggle Dark\", scale=0)\n", " toggle_dark.click(\n", " None,\n", " _js=\"\"\"\n", " () => { \n", " document.body.classList.toggle('dark');\n", " }\n", " \"\"\",\n", " )\n", " theme_selector = gr.Radio(\n", " [\"Base\", \"Default\", \"Monochrome\", \"Soft\", \"Glass\"],\n", " value=\"Base\",\n", " label=\"Theme\",\n", " )\n", " theme_selector.change(\n", " None,\n", " theme_selector,\n", " None,\n", " _js=f\"\"\"\n", " (theme) => {{\n", " if (!document.querySelector('.theme-css')) {{\n", " var theme_elem = document.createElement('style');\n", " theme_elem.classList.add('theme-css');\n", " document.head.appendChild(theme_elem);\n", "\n", " var link_elem = document.createElement('link');\n", " link_elem.classList.add('link-css');\n", " link_elem.rel = 'stylesheet';\n", " document.head.appendChild(link_elem);\n", " }} else 
{{\n", " var theme_elem = document.querySelector('.theme-css');\n", " var link_elem = document.querySelector('.link-css');\n", " }}\n", " if (theme == \"Base\") {{\n", " var theme_css = `{base_theme._get_theme_css()}`;\n", " var link_css = `{base_theme._stylesheets[0]}`;\n", " }} else if (theme == \"Default\") {{\n", " var theme_css = `{default_theme._get_theme_css()}`;\n", " var link_css = `{default_theme._stylesheets[0]}`;\n", " }} else if (theme == \"Monochrome\") {{\n", " var theme_css = `{monochrome_theme._get_theme_css()}`;\n", " var link_css = `{monochrome_theme._stylesheets[0]}`;\n", " }} else if (theme == \"Soft\") {{\n", " var theme_css = `{soft_theme._get_theme_css()}`;\n", " var link_css = `{soft_theme._stylesheets[0]}`;\n", " }} else if (theme == \"Glass\") {{\n", " var theme_css = `{glass_theme._get_theme_css()}`;\n", " var link_css = `{glass_theme._stylesheets[0]}`;\n", " }}\n", " theme_elem.innerHTML = theme_css;\n", " link_elem.href = link_css;\n", " }}\n", " \"\"\",\n", " )\n", "\n", " name = gr.Textbox(\n", " label=\"Name (select)\",\n", " info=\"Full name, including middle name. No special characters.\",\n", " placeholder=\"John Doe\",\n", " value=\"John Doe\",\n", " interactive=True,\n", " )\n", "\n", " with gr.Row():\n", " slider1 = gr.Slider(label=\"Slider 1\")\n", " slider2 = gr.Slider(label=\"Slider 2\")\n", " checkboxes = gr.CheckboxGroup([\"A\", \"B\", \"C\"], label=\"Checkbox Group (select)\")\n", "\n", " with gr.Row():\n", " with gr.Column(variant=\"panel\", scale=1):\n", " gr.Markdown(\"## Panel 1\")\n", " radio = gr.Radio(\n", " [\"A\", \"B\", \"C\"],\n", " label=\"Radio (select)\",\n", " info=\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.\",\n", " )\n", " drop = gr.Dropdown([\"Option 1\", \"Option 2\", \"Option 3\"], show_label=False)\n", " drop_2 = gr.Dropdown(\n", " [\"Option A\", \"Option B\", \"Option C\"],\n", " multiselect=True,\n", " value=[\"Option A\"],\n", " label=\"Dropdown (select)\",\n", " interactive=True,\n", " )\n", " check = gr.Checkbox(label=\"Go\")\n", " with gr.Column(variant=\"panel\", scale=2):\n", " img = gr.Image(\n", " \"https://picsum.photos/536/354\",\n", " label=\"Image\",\n", " height=320,\n", " )\n", " with gr.Row():\n", " go_btn = gr.Button(\"Go\", label=\"Primary Button\", variant=\"primary\")\n", " clear_btn = gr.Button(\n", " \"Clear\", label=\"Secondary Button\", variant=\"secondary\"\n", " )\n", "\n", " def go(*args):\n", " time.sleep(3)\n", " return \"https://i.ibb.co/6BgKdSj/groot.jpg\"\n", "\n", " go_btn.click(go, [radio, drop, drop_2, check, name], img, api_name=\"go\")\n", "\n", " def clear():\n", " time.sleep(0.2)\n", " return None\n", "\n", " clear_btn.click(clear, None, img)\n", "\n", " with gr.Row():\n", " btn1 = gr.Button(\"Button 1\", size=\"sm\")\n", " btn2 = gr.UploadButton(size=\"sm\")\n", " stop_btn = gr.Button(\n", " \"Stop\", label=\"Stop Button\", variant=\"stop\", size=\"sm\"\n", " )\n", "\n", " gr.Examples(\n", " examples=[join(KS_FILES, \"lion.jpg\"), join(KS_FILES, \"tower.jpg\")],\n", " inputs=img,\n", " )\n", "\n", " gr.Examples(\n", " examples=[\n", " [\"A\", \"Option 1\", [\"Option B\"], True, join(KS_FILES, \"lion.jpg\")],\n", " [\n", " \"B\",\n", " \"Option 2\",\n", " [\"Option B\", \"Option C\"],\n", " False,\n", " join(KS_FILES, \"tower.jpg\"),\n", " ],\n", " ],\n", " inputs=[radio, drop, drop_2, check, img],\n", " label=\"Examples (select)\",\n", " )\n", "\n", " gr.Markdown(\"## Media Files\")\n", "\n", " with gr.Tabs() as tabs:\n", " with gr.Tab(\"Audio\"):\n", " with gr.Row():\n", " gr.Audio()\n", " 
gr.Audio(source=\"microphone\")\n", " gr.Audio(join(KS_FILES, \"cantina.wav\"))\n", " with gr.Tab(\"Other\"):\n", " # gr.Image(source=\"webcam\")\n", " gr.HTML(\n", " \"
\"\n", " )\n", " with gr.Row():\n", " dataframe = gr.Dataframe(\n", " value=[[1, 2, 3], [4, 5, 6], [7, 8, 9]], label=\"Dataframe (select)\"\n", " )\n", " gr.JSON(\n", " value={\"a\": 1, \"b\": 2, \"c\": {\"test\": \"a\", \"test2\": [1, 2, 3]}}, label=\"JSON\"\n", " )\n", " label = gr.Label(\n", " value={\"cat\": 0.7, \"dog\": 0.2, \"fish\": 0.1}, label=\"Label (select)\"\n", " )\n", " file = gr.File(label=\"File (select)\")\n", " with gr.Row():\n", " gr.ColorPicker()\n", " gr.Video(join(KS_FILES, \"world.mp4\"))\n", " gallery = gr.Gallery(\n", " [\n", " (join(KS_FILES, \"lion.jpg\"), \"lion\"),\n", " (join(KS_FILES, \"logo.png\"), \"logo\"),\n", " (join(KS_FILES, \"tower.jpg\"), \"tower\"),\n", " ],\n", " label=\"Gallery (select)\",\n", " )\n", "\n", " with gr.Row():\n", " with gr.Column(scale=2):\n", " highlight = gr.HighlightedText(\n", " [[\"The\", \"art\"], [\"dog\", \"noun\"], [\"is\", None], [\"fat\", \"adj\"]],\n", " label=\"Highlighted Text (select)\",\n", " )\n", " chatbot = gr.Chatbot([[\"Hello\", \"Hi\"]], label=\"Chatbot (select)\")\n", " chat_btn = gr.Button(\"Add messages\")\n", "\n", " def chat(history):\n", " time.sleep(2)\n", " yield [[\"How are you?\", \"I am good.\"]]\n", " time\n", "\n", " chat_btn.click(\n", " lambda history: history\n", " + [[\"How are you?\", \"I am good.\"]]\n", " + (time.sleep(2) or []),\n", " chatbot,\n", " chatbot,\n", " )\n", " with gr.Column(scale=1):\n", " with gr.Accordion(\"Select Info\"):\n", " gr.Markdown(\n", " \"Click on any part of any component with '(select)' in the label and see the SelectData data here.\"\n", " )\n", " select_index = gr.Textbox(label=\"Index\")\n", " select_value = gr.Textbox(label=\"Value\")\n", " select_selected = gr.Textbox(label=\"Selected\")\n", "\n", " selectables = [\n", " name,\n", " checkboxes,\n", " radio,\n", " drop_2,\n", " dataframe,\n", " label,\n", " file,\n", " highlight,\n", " chatbot,\n", " gallery,\n", " tabs,\n", " ]\n", "\n", " def select_data(evt: gr.SelectData):\n", " 
return [\n", " evt.index,\n", " evt.value,\n", " evt.selected,\n", " ]\n", "\n", " for selectable in selectables:\n", " selectable.select(\n", " select_data,\n", " None,\n", " [select_index, select_value, select_selected],\n", " )\n", "\n", " gr.Markdown(\"## Dataset Examples\")\n", "\n", " component_example_set = [\n", " (gr.Audio(render=False), join(KS_FILES, \"cantina.wav\")),\n", " (gr.Checkbox(render=False), True),\n", " (gr.CheckboxGroup(render=False, choices=[\"A\", \"B\"]), [\"A\", \"B\"]),\n", " (gr.ColorPicker(render=False), \"#FF0000\"),\n", " (gr.Dataframe(render=False), [[1, 2, 3], [4, 5, 6]]),\n", " (gr.Dropdown(render=False), \"A\"),\n", " (gr.File(render=False), join(KS_FILES, \"lion.jpg\")),\n", " (gr.HTML(render=False), \"
Test
\"),\n", " (gr.Image(render=False), join(KS_FILES, \"lion.jpg\")),\n", " (gr.Markdown(render=False), \"# Test\"),\n", " (gr.Number(render=False), 1),\n", " (gr.Radio(render=False), \"A\"),\n", " (gr.Slider(render=False), 1),\n", " (gr.Textbox(render=False), \"A\"),\n", " (gr.Video(render=False), join(KS_FILES, \"world.mp4\")),\n", " ]\n", " gr.Dataset(\n", " components=[c for c, _ in component_example_set],\n", " samples=[[e for _, e in component_example_set]],\n", " )\n", "\n", " with gr.Tabs():\n", " for c, e in component_example_set:\n", " with gr.Tab(c.__class__.__name__):\n", " gr.Dataset(components=[c], samples=[[e]] * 3)\n", "\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch(allowed_paths=[KS_FILES])\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/blocks_mask/run.ipynb b/testbed/gradio-app__gradio/demo/blocks_mask/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..47d91349e56854ec34671254b0476f5dcbcce5f4 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/blocks_mask/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: blocks_mask"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/blocks_mask/image.png\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/blocks_mask/lion.jpg\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/blocks_mask/lion.webp"]}, {"cell_type": "code", "execution_count": null, "id": 44380577570523278879349135829904343037, 
"metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "from gradio.components import Markdown as md\n", "from PIL import Image\n", "\n", "demo = gr.Blocks()\n", "\n", "io1a = gr.Interface(lambda x: x, gr.Image(), gr.Image())\n", "io1b = gr.Interface(lambda x: x, gr.Image(source=\"webcam\"), gr.Image())\n", "\n", "io2a = gr.Interface(lambda x: x, gr.Image(source=\"canvas\"), gr.Image())\n", "io2b = gr.Interface(lambda x: x, gr.Sketchpad(), gr.Image())\n", "io2c = gr.Interface(\n", " lambda x: x, gr.Image(source=\"canvas\", shape=(512, 512)), gr.Image()\n", ")\n", "\n", "io3a = gr.Interface(\n", " lambda x: [x[\"mask\"], x[\"image\"]],\n", " gr.Image(source=\"upload\", tool=\"sketch\"),\n", " [gr.Image(), gr.Image()],\n", ")\n", "\n", "io3b = gr.Interface(\n", " lambda x: [x[\"mask\"], x[\"image\"]],\n", " gr.ImageMask(),\n", " [gr.Image(), gr.Image()],\n", ")\n", "\n", "io3b2 = gr.Interface(\n", " lambda x: [x[\"mask\"], x[\"image\"]],\n", " gr.ImageMask(),\n", " [gr.Image(), gr.Image()],\n", ")\n", "\n", "io3b3 = gr.Interface(\n", " lambda x: [x[\"mask\"], x[\"image\"]],\n", " gr.ImageMask(),\n", " [gr.Image(), gr.Image()],\n", ")\n", "\n", "io3c = gr.Interface(\n", " lambda x: [x[\"mask\"], x[\"image\"]],\n", " gr.Image(source=\"webcam\", tool=\"sketch\"),\n", " [gr.Image(), gr.Image()],\n", ")\n", "\n", "io4a = gr.Interface(\n", " lambda x: x, gr.Image(source=\"canvas\", tool=\"color-sketch\"), gr.Image()\n", ")\n", "io4b = gr.Interface(lambda x: x, gr.Paint(), gr.Image())\n", "\n", "io5a = gr.Interface(\n", " lambda x: x, gr.Image(source=\"upload\", tool=\"color-sketch\"), gr.Image()\n", ")\n", "io5b = gr.Interface(lambda x: x, gr.ImagePaint(), gr.Image())\n", "io5c = gr.Interface(\n", " lambda x: x, gr.Image(source=\"webcam\", tool=\"color-sketch\"), gr.Image()\n", ")\n", "\n", "\n", "def save_image(image):\n", " image.save(\"colorede.png\")\n", " return image\n", "\n", "\n", "img = Image.new(\"RGB\", (512, 512), (150, 150, 150))\n", 
"img.save(\"image.png\", \"PNG\")\n", "\n", "io5d = gr.Interface(\n", " save_image,\n", " gr.Image(\"image.png\", source=\"upload\", tool=\"color-sketch\", type=\"pil\"),\n", " gr.Image(),\n", ")\n", "\n", "with demo:\n", " md(\"# Different Ways to Use the Image Input Component\")\n", " md(\n", " \"**1a. Standalone Image Upload: `gr.Interface(lambda x: x, gr.Image(), gr.Image())`**\"\n", " )\n", " io1a.render()\n", " md(\n", " \"**1b. Standalone Image from Webcam: `gr.Interface(lambda x: x, gr.Image(source='webcam'), gr.Image())`**\"\n", " )\n", " io1b.render()\n", " md(\n", " \"**2a. Black and White Sketchpad: `gr.Interface(lambda x: x, gr.Image(source='canvas'), gr.Image())`**\"\n", " )\n", " io2a.render()\n", " md(\n", " \"**2b. Black and White Sketchpad: `gr.Interface(lambda x: x, gr.Sketchpad(), gr.Image())`**\"\n", " )\n", " io2b.render()\n", " md(\"**2c. Black and White Sketchpad with `shape=(512,512)`**\")\n", " io2c.render()\n", " md(\"**3a. Binary Mask with image upload:**\")\n", " md(\n", " \"\"\"```python\n", "gr.Interface(\n", " lambda x: [x['mask'], x['image']],\n", " gr.Image(source='upload', tool='sketch'),\n", " [gr.Image(), gr.Image()],\n", ")\n", "```\n", "\"\"\"\n", " )\n", " io3a.render()\n", " md(\"**3b. Binary Mask with image upload:**\")\n", " md(\n", " \"\"\"```python\n", "gr.Interface(\n", " lambda x: [x['mask'], x['image']],\n", " gr.ImageMask(),\n", " [gr.Image(), gr.Image()],\n", ")\n", "```\n", "\"\"\"\n", " )\n", " io3b.render()\n", " md(\"**3c. Binary Mask with webcam upload:**\")\n", " md(\n", " \"\"\"```python\n", "gr.Interface(\n", " lambda x: [x['mask'], x['image']],\n", " gr.Image(source='webcam', tool='sketch'),\n", " [gr.Image(), gr.Image()],\n", ")\n", "```\n", "\"\"\"\n", " )\n", " io3c.render()\n", " md(\n", " \"**4a. Color Sketchpad: `gr.Interface(lambda x: x, gr.Image(source='canvas', tool='color-sketch'), gr.Image())`**\"\n", " )\n", " io4a.render()\n", " md(\"**4b. 
Color Sketchpad: `gr.Interface(lambda x: x, gr.Paint(), gr.Image())`**\")\n", " io4b.render()\n", " md(\n", " \"**5a. Color Sketchpad with image upload: `gr.Interface(lambda x: x, gr.Image(source='upload', tool='color-sketch'), gr.Image())`**\"\n", " )\n", " io5a.render()\n", " md(\n", " \"**5b. Color Sketchpad with image upload: `gr.Interface(lambda x: x, gr.ImagePaint(), gr.Image())`**\"\n", " )\n", " io5b.render()\n", " md(\n", " \"**5c. Color Sketchpad with webcam upload: `gr.Interface(lambda x: x, gr.Image(source='webcam', tool='color-sketch'), gr.Image())`**\"\n", " )\n", " io5c.render()\n", " md(\"**Tabs**\")\n", " with gr.Tab(\"One\"):\n", " io3b2.render()\n", " with gr.Tab(\"Two\"):\n", " io3b3.render()\n", " md(\"**5d. Color Sketchpad with image upload and a default images**\")\n", " io5d.render()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/blocks_mask/run.py b/testbed/gradio-app__gradio/demo/blocks_mask/run.py new file mode 100644 index 0000000000000000000000000000000000000000..93e24a12ae3cbe9adb7d83913844a45e82bfcd2c --- /dev/null +++ b/testbed/gradio-app__gradio/demo/blocks_mask/run.py @@ -0,0 +1,157 @@ +import gradio as gr +from gradio.components import Markdown as md +from PIL import Image + +demo = gr.Blocks() + +io1a = gr.Interface(lambda x: x, gr.Image(), gr.Image()) +io1b = gr.Interface(lambda x: x, gr.Image(source="webcam"), gr.Image()) + +io2a = gr.Interface(lambda x: x, gr.Image(source="canvas"), gr.Image()) +io2b = gr.Interface(lambda x: x, gr.Sketchpad(), gr.Image()) +io2c = gr.Interface( + lambda x: x, gr.Image(source="canvas", shape=(512, 512)), gr.Image() +) + +io3a = gr.Interface( + lambda x: [x["mask"], x["image"]], + gr.Image(source="upload", tool="sketch"), + [gr.Image(), gr.Image()], +) + +io3b = gr.Interface( + lambda x: [x["mask"], x["image"]], + gr.ImageMask(), + [gr.Image(), 
gr.Image()], +) + +io3b2 = gr.Interface( + lambda x: [x["mask"], x["image"]], + gr.ImageMask(), + [gr.Image(), gr.Image()], +) + +io3b3 = gr.Interface( + lambda x: [x["mask"], x["image"]], + gr.ImageMask(), + [gr.Image(), gr.Image()], +) + +io3c = gr.Interface( + lambda x: [x["mask"], x["image"]], + gr.Image(source="webcam", tool="sketch"), + [gr.Image(), gr.Image()], +) + +io4a = gr.Interface( + lambda x: x, gr.Image(source="canvas", tool="color-sketch"), gr.Image() +) +io4b = gr.Interface(lambda x: x, gr.Paint(), gr.Image()) + +io5a = gr.Interface( + lambda x: x, gr.Image(source="upload", tool="color-sketch"), gr.Image() +) +io5b = gr.Interface(lambda x: x, gr.ImagePaint(), gr.Image()) +io5c = gr.Interface( + lambda x: x, gr.Image(source="webcam", tool="color-sketch"), gr.Image() +) + + +def save_image(image): + image.save("colorede.png") + return image + + +img = Image.new("RGB", (512, 512), (150, 150, 150)) +img.save("image.png", "PNG") + +io5d = gr.Interface( + save_image, + gr.Image("image.png", source="upload", tool="color-sketch", type="pil"), + gr.Image(), +) + +with demo: + md("# Different Ways to Use the Image Input Component") + md( + "**1a. Standalone Image Upload: `gr.Interface(lambda x: x, gr.Image(), gr.Image())`**" + ) + io1a.render() + md( + "**1b. Standalone Image from Webcam: `gr.Interface(lambda x: x, gr.Image(source='webcam'), gr.Image())`**" + ) + io1b.render() + md( + "**2a. Black and White Sketchpad: `gr.Interface(lambda x: x, gr.Image(source='canvas'), gr.Image())`**" + ) + io2a.render() + md( + "**2b. Black and White Sketchpad: `gr.Interface(lambda x: x, gr.Sketchpad(), gr.Image())`**" + ) + io2b.render() + md("**2c. Black and White Sketchpad with `shape=(512,512)`**") + io2c.render() + md("**3a. Binary Mask with image upload:**") + md( + """```python +gr.Interface( + lambda x: [x['mask'], x['image']], + gr.Image(source='upload', tool='sketch'), + [gr.Image(), gr.Image()], +) +``` +""" + ) + io3a.render() + md("**3b. 
Binary Mask with image upload:**") + md( + """```python +gr.Interface( + lambda x: [x['mask'], x['image']], + gr.ImageMask(), + [gr.Image(), gr.Image()], +) +``` +""" + ) + io3b.render() + md("**3c. Binary Mask with webcam upload:**") + md( + """```python +gr.Interface( + lambda x: [x['mask'], x['image']], + gr.Image(source='webcam', tool='sketch'), + [gr.Image(), gr.Image()], +) +``` +""" + ) + io3c.render() + md( + "**4a. Color Sketchpad: `gr.Interface(lambda x: x, gr.Image(source='canvas', tool='color-sketch'), gr.Image())`**" + ) + io4a.render() + md("**4b. Color Sketchpad: `gr.Interface(lambda x: x, gr.Paint(), gr.Image())`**") + io4b.render() + md( + "**5a. Color Sketchpad with image upload: `gr.Interface(lambda x: x, gr.Image(source='upload', tool='color-sketch'), gr.Image())`**" + ) + io5a.render() + md( + "**5b. Color Sketchpad with image upload: `gr.Interface(lambda x: x, gr.ImagePaint(), gr.Image())`**" + ) + io5b.render() + md( + "**5c. Color Sketchpad with webcam upload: `gr.Interface(lambda x: x, gr.Image(source='webcam', tool='color-sketch'), gr.Image())`**" + ) + io5c.render() + md("**Tabs**") + with gr.Tab("One"): + io3b2.render() + with gr.Tab("Two"): + io3b3.render() + md("**5d. 
Color Sketchpad with image upload and a default images**") + io5d.render() + +if __name__ == "__main__": + demo.launch() diff --git a/testbed/gradio-app__gradio/demo/blocks_style/run.ipynb b/testbed/gradio-app__gradio/demo/blocks_style/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..ff59d8ba72300acf1bf44ffe64bfa3e080e99ff5 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/blocks_style/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: blocks_style"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "with gr.Blocks(title=\"Styling Examples\") as demo:\n", " with gr.Column(variant=\"box\"):\n", " txt = gr.Textbox(label=\"Small Textbox\", lines=1)\n", " num = gr.Number(label=\"Number\", show_label=False)\n", " slider = gr.Slider(label=\"Slider\", show_label=False)\n", " check = gr.Checkbox(label=\"Checkbox\", show_label=False)\n", " check_g = gr.CheckboxGroup(\n", " label=\"Checkbox Group\",\n", " choices=[\"One\", \"Two\", \"Three\"],\n", " show_label=False,\n", " )\n", " radio = gr.Radio(\n", " label=\"Radio\",\n", " choices=[\"One\", \"Two\", \"Three\"],\n", " show_label=False,\n", " )\n", " drop = gr.Dropdown(\n", " label=\"Dropdown\", choices=[\"One\", \"Two\", \"Three\"], show_label=False\n", " )\n", " image = gr.Image(show_label=False)\n", " video = gr.Video(show_label=False)\n", " audio = gr.Audio(show_label=False)\n", " file = gr.File(show_label=False)\n", " df = gr.Dataframe(show_label=False)\n", " ts = gr.Timeseries(show_label=False)\n", " label = gr.Label(container=False)\n", " highlight = gr.HighlightedText(\n", " [(\"hello\", None), 
(\"goodbye\", \"-\")],\n", " color_map={\"+\": \"green\", \"-\": \"red\"},\n", " container=False,\n", " )\n", " json = gr.JSON(container=False)\n", " html = gr.HTML(show_label=False)\n", " gallery = gr.Gallery(\n", " columns=(3, 3, 1),\n", " height=\"auto\",\n", " container=False,\n", " )\n", " chat = gr.Chatbot([(\"hi\", \"good bye\")])\n", "\n", " model = gr.Model3D()\n", "\n", " md = gr.Markdown(show_label=False)\n", "\n", " highlight = gr.HighlightedText()\n", "\n", " btn = gr.Button(\"Run\")\n", "\n", " gr.Dataset(components=[txt, num])\n", "\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/blocks_style/run.py b/testbed/gradio-app__gradio/demo/blocks_style/run.py new file mode 100644 index 0000000000000000000000000000000000000000..9e40fcdc2133155ce51617e7315fb5a6057b3510 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/blocks_style/run.py @@ -0,0 +1,55 @@ +import gradio as gr + +with gr.Blocks(title="Styling Examples") as demo: + with gr.Column(variant="box"): + txt = gr.Textbox(label="Small Textbox", lines=1) + num = gr.Number(label="Number", show_label=False) + slider = gr.Slider(label="Slider", show_label=False) + check = gr.Checkbox(label="Checkbox", show_label=False) + check_g = gr.CheckboxGroup( + label="Checkbox Group", + choices=["One", "Two", "Three"], + show_label=False, + ) + radio = gr.Radio( + label="Radio", + choices=["One", "Two", "Three"], + show_label=False, + ) + drop = gr.Dropdown( + label="Dropdown", choices=["One", "Two", "Three"], show_label=False + ) + image = gr.Image(show_label=False) + video = gr.Video(show_label=False) + audio = gr.Audio(show_label=False) + file = gr.File(show_label=False) + df = gr.Dataframe(show_label=False) + ts = gr.Timeseries(show_label=False) + label = gr.Label(container=False) + highlight = gr.HighlightedText( + [("hello", None), ("goodbye", "-")], + color_map={"+": 
"green", "-": "red"}, + container=False, + ) + json = gr.JSON(container=False) + html = gr.HTML(show_label=False) + gallery = gr.Gallery( + columns=(3, 3, 1), + height="auto", + container=False, + ) + chat = gr.Chatbot([("hi", "good bye")]) + + model = gr.Model3D() + + md = gr.Markdown(show_label=False) + + highlight = gr.HighlightedText() + + btn = gr.Button("Run") + + gr.Dataset(components=[txt, num]) + + +if __name__ == "__main__": + demo.launch() diff --git a/testbed/gradio-app__gradio/demo/clustering/DESCRIPTION.md b/testbed/gradio-app__gradio/demo/clustering/DESCRIPTION.md new file mode 100644 index 0000000000000000000000000000000000000000..f57e9f25bd22de0cb4c9625203d4b79b747bfbcb --- /dev/null +++ b/testbed/gradio-app__gradio/demo/clustering/DESCRIPTION.md @@ -0,0 +1 @@ +This demo built with Blocks generates 9 plots based on the input. \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/clustering/requirements.txt b/testbed/gradio-app__gradio/demo/clustering/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..db7c7e4468305565bc82b28927c821b04c299700 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/clustering/requirements.txt @@ -0,0 +1,2 @@ +matplotlib>=3.5.2 +scikit-learn>=1.0.1 diff --git a/testbed/gradio-app__gradio/demo/clustering/run.ipynb b/testbed/gradio-app__gradio/demo/clustering/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..08fb7d0b82f7be91b3d2ec7871037979269e6593 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/clustering/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: clustering\n", "### This demo built with Blocks generates 9 plots based on the input.\n", " "]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio matplotlib>=3.5.2 
scikit-learn>=1.0.1 "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import math\n", "from functools import partial\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", "from sklearn.cluster import (\n", " AgglomerativeClustering, Birch, DBSCAN, KMeans, MeanShift, OPTICS, SpectralClustering, estimate_bandwidth\n", ")\n", "from sklearn.datasets import make_blobs, make_circles, make_moons\n", "from sklearn.mixture import GaussianMixture\n", "from sklearn.neighbors import kneighbors_graph\n", "from sklearn.preprocessing import StandardScaler\n", "\n", "plt.style.use('seaborn-v0_8')\n", "SEED = 0\n", "MAX_CLUSTERS = 10\n", "N_SAMPLES = 1000\n", "N_COLS = 3\n", "FIGSIZE = 7, 7 # does not affect size in webpage\n", "COLORS = [\n", " 'blue', 'orange', 'green', 'red', 'purple', 'brown', 'pink', 'gray', 'olive', 'cyan'\n", "]\n", "assert len(COLORS) >= MAX_CLUSTERS, \"Not enough different colors for all clusters\"\n", "np.random.seed(SEED)\n", "\n", "\n", "def normalize(X):\n", " return StandardScaler().fit_transform(X)\n", "\n", "def get_regular(n_clusters):\n", " # spiral pattern\n", " centers = [\n", " [0, 0],\n", " [1, 0],\n", " [1, 1],\n", " [0, 1],\n", " [-1, 1],\n", " [-1, 0],\n", " [-1, -1],\n", " [0, -1],\n", " [1, -1],\n", " [2, -1],\n", " ][:n_clusters]\n", " assert len(centers) == n_clusters\n", " X, labels = make_blobs(n_samples=N_SAMPLES, centers=centers, cluster_std=0.25, random_state=SEED)\n", " return normalize(X), labels\n", "\n", "\n", "def get_circles(n_clusters):\n", " X, labels = make_circles(n_samples=N_SAMPLES, factor=0.5, noise=0.05, random_state=SEED)\n", " return normalize(X), labels\n", "\n", "\n", "def get_moons(n_clusters):\n", " X, labels = make_moons(n_samples=N_SAMPLES, noise=0.05, random_state=SEED)\n", " return normalize(X), labels\n", "\n", "\n", "def get_noise(n_clusters):\n", " np.random.seed(SEED)\n", " X, 
labels = np.random.rand(N_SAMPLES, 2), np.random.randint(0, n_clusters, size=(N_SAMPLES,))\n", " return normalize(X), labels\n", "\n", "\n", "def get_anisotropic(n_clusters):\n", " X, labels = make_blobs(n_samples=N_SAMPLES, centers=n_clusters, random_state=170)\n", " transformation = [[0.6, -0.6], [-0.4, 0.8]]\n", " X = np.dot(X, transformation)\n", " return X, labels\n", "\n", "\n", "def get_varied(n_clusters):\n", " cluster_std = [1.0, 2.5, 0.5, 1.0, 2.5, 0.5, 1.0, 2.5, 0.5, 1.0][:n_clusters]\n", " assert len(cluster_std) == n_clusters\n", " X, labels = make_blobs(\n", " n_samples=N_SAMPLES, centers=n_clusters, cluster_std=cluster_std, random_state=SEED\n", " )\n", " return normalize(X), labels\n", "\n", "\n", "def get_spiral(n_clusters):\n", " # from https://scikit-learn.org/stable/auto_examples/cluster/plot_agglomerative_clustering.html\n", " np.random.seed(SEED)\n", " t = 1.5 * np.pi * (1 + 3 * np.random.rand(1, N_SAMPLES))\n", " x = t * np.cos(t)\n", " y = t * np.sin(t)\n", " X = np.concatenate((x, y))\n", " X += 0.7 * np.random.randn(2, N_SAMPLES)\n", " X = np.ascontiguousarray(X.T)\n", "\n", " labels = np.zeros(N_SAMPLES, dtype=int)\n", " return normalize(X), labels\n", "\n", "\n", "DATA_MAPPING = {\n", " 'regular': get_regular,\n", " 'circles': get_circles,\n", " 'moons': get_moons,\n", " 'spiral': get_spiral,\n", " 'noise': get_noise,\n", " 'anisotropic': get_anisotropic,\n", " 'varied': get_varied,\n", "}\n", "\n", "\n", "def get_groundtruth_model(X, labels, n_clusters, **kwargs):\n", " # dummy model to show true label distribution\n", " class Dummy:\n", " def __init__(self, y):\n", " self.labels_ = labels\n", "\n", " return Dummy(labels)\n", "\n", "\n", "def get_kmeans(X, labels, n_clusters, **kwargs):\n", " model = KMeans(init=\"k-means++\", n_clusters=n_clusters, n_init=10, random_state=SEED)\n", " model.set_params(**kwargs)\n", " return model.fit(X)\n", "\n", "\n", "def get_dbscan(X, labels, n_clusters, **kwargs):\n", " model = DBSCAN(eps=0.3)\n", " 
model.set_params(**kwargs)\n", " return model.fit(X)\n", "\n", "\n", "def get_agglomerative(X, labels, n_clusters, **kwargs):\n", " connectivity = kneighbors_graph(\n", " X, n_neighbors=n_clusters, include_self=False\n", " )\n", " # make connectivity symmetric\n", " connectivity = 0.5 * (connectivity + connectivity.T)\n", " model = AgglomerativeClustering(\n", " n_clusters=n_clusters, linkage=\"ward\", connectivity=connectivity\n", " )\n", " model.set_params(**kwargs)\n", " return model.fit(X)\n", "\n", "\n", "def get_meanshift(X, labels, n_clusters, **kwargs):\n", " bandwidth = estimate_bandwidth(X, quantile=0.25)\n", " model = MeanShift(bandwidth=bandwidth, bin_seeding=True)\n", " model.set_params(**kwargs)\n", " return model.fit(X)\n", "\n", "\n", "def get_spectral(X, labels, n_clusters, **kwargs):\n", " model = SpectralClustering(\n", " n_clusters=n_clusters,\n", " eigen_solver=\"arpack\",\n", " affinity=\"nearest_neighbors\",\n", " )\n", " model.set_params(**kwargs)\n", " return model.fit(X)\n", "\n", "\n", "def get_optics(X, labels, n_clusters, **kwargs):\n", " model = OPTICS(\n", " min_samples=7,\n", " xi=0.05,\n", " min_cluster_size=0.1,\n", " )\n", " model.set_params(**kwargs)\n", " return model.fit(X)\n", "\n", "\n", "def get_birch(X, labels, n_clusters, **kwargs):\n", " model = Birch(n_clusters=n_clusters)\n", " model.set_params(**kwargs)\n", " return model.fit(X)\n", "\n", "\n", "def get_gaussianmixture(X, labels, n_clusters, **kwargs):\n", " model = GaussianMixture(\n", " n_components=n_clusters, covariance_type=\"full\", random_state=SEED,\n", " )\n", " model.set_params(**kwargs)\n", " return model.fit(X)\n", "\n", "\n", "MODEL_MAPPING = {\n", " 'True labels': get_groundtruth_model,\n", " 'KMeans': get_kmeans,\n", " 'DBSCAN': get_dbscan,\n", " 'MeanShift': get_meanshift,\n", " 'SpectralClustering': get_spectral,\n", " 'OPTICS': get_optics,\n", " 'Birch': get_birch,\n", " 'GaussianMixture': get_gaussianmixture,\n", " 'AgglomerativeClustering': 
get_agglomerative,\n", "}\n", "\n", "\n", "def plot_clusters(ax, X, labels):\n", " set_clusters = set(labels)\n", " set_clusters.discard(-1) # -1 signifiies outliers, which we plot separately\n", " for label, color in zip(sorted(set_clusters), COLORS):\n", " idx = labels == label\n", " if not sum(idx):\n", " continue\n", " ax.scatter(X[idx, 0], X[idx, 1], color=color)\n", "\n", " # show outliers (if any)\n", " idx = labels == -1\n", " if sum(idx):\n", " ax.scatter(X[idx, 0], X[idx, 1], c='k', marker='x')\n", "\n", " ax.grid(None)\n", " ax.set_xticks([])\n", " ax.set_yticks([])\n", " return ax\n", "\n", "\n", "def cluster(dataset: str, n_clusters: int, clustering_algorithm: str):\n", " if isinstance(n_clusters, dict):\n", " n_clusters = n_clusters['value']\n", " else:\n", " n_clusters = int(n_clusters)\n", "\n", " X, labels = DATA_MAPPING[dataset](n_clusters)\n", " model = MODEL_MAPPING[clustering_algorithm](X, labels, n_clusters=n_clusters)\n", " if hasattr(model, \"labels_\"):\n", " y_pred = model.labels_.astype(int)\n", " else:\n", " y_pred = model.predict(X)\n", "\n", " fig, ax = plt.subplots(figsize=FIGSIZE)\n", "\n", " plot_clusters(ax, X, y_pred)\n", " ax.set_title(clustering_algorithm, fontsize=16)\n", "\n", " return fig\n", "\n", "\n", "title = \"Clustering with Scikit-learn\"\n", "description = (\n", " \"This example shows how different clustering algorithms work. Simply pick \"\n", " \"the dataset and the number of clusters to see how the clustering algorithms work. 
\"\n", " \"Colored circles are (predicted) labels and black x are outliers.\"\n", ")\n", "\n", "\n", "def iter_grid(n_rows, n_cols):\n", " # create a grid using gradio Block\n", " for _ in range(n_rows):\n", " with gr.Row():\n", " for _ in range(n_cols):\n", " with gr.Column():\n", " yield\n", "\n", "with gr.Blocks(title=title) as demo:\n", " gr.HTML(f\"{title}\")\n", " gr.Markdown(description)\n", "\n", " input_models = list(MODEL_MAPPING)\n", " input_data = gr.Radio(\n", " list(DATA_MAPPING),\n", " value=\"regular\",\n", " label=\"dataset\"\n", " )\n", " input_n_clusters = gr.Slider(\n", " minimum=1,\n", " maximum=MAX_CLUSTERS,\n", " value=4,\n", " step=1,\n", " label='Number of clusters'\n", " )\n", " n_rows = int(math.ceil(len(input_models) / N_COLS))\n", " counter = 0\n", " for _ in iter_grid(n_rows, N_COLS):\n", " if counter >= len(input_models):\n", " break\n", "\n", " input_model = input_models[counter]\n", " plot = gr.Plot(label=input_model)\n", " fn = partial(cluster, clustering_algorithm=input_model)\n", " input_data.change(fn=fn, inputs=[input_data, input_n_clusters], outputs=plot)\n", " input_n_clusters.change(fn=fn, inputs=[input_data, input_n_clusters], outputs=plot)\n", " counter += 1\n", "\n", "demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/clustering/run.py b/testbed/gradio-app__gradio/demo/clustering/run.py new file mode 100644 index 0000000000000000000000000000000000000000..e3a67c00c2c55f040eb3df2d7447c02fb7d73af8 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/clustering/run.py @@ -0,0 +1,281 @@ +import gradio as gr +import math +from functools import partial +import matplotlib.pyplot as plt +import numpy as np +from sklearn.cluster import ( + AgglomerativeClustering, Birch, DBSCAN, KMeans, MeanShift, OPTICS, SpectralClustering, estimate_bandwidth +) +from sklearn.datasets import make_blobs, make_circles, make_moons +from sklearn.mixture 
import GaussianMixture +from sklearn.neighbors import kneighbors_graph +from sklearn.preprocessing import StandardScaler + +plt.style.use('seaborn-v0_8') +SEED = 0 +MAX_CLUSTERS = 10 +N_SAMPLES = 1000 +N_COLS = 3 +FIGSIZE = 7, 7 # does not affect size in webpage +COLORS = [ + 'blue', 'orange', 'green', 'red', 'purple', 'brown', 'pink', 'gray', 'olive', 'cyan' +] +assert len(COLORS) >= MAX_CLUSTERS, "Not enough different colors for all clusters" +np.random.seed(SEED) + + +def normalize(X): + return StandardScaler().fit_transform(X) + +def get_regular(n_clusters): + # spiral pattern + centers = [ + [0, 0], + [1, 0], + [1, 1], + [0, 1], + [-1, 1], + [-1, 0], + [-1, -1], + [0, -1], + [1, -1], + [2, -1], + ][:n_clusters] + assert len(centers) == n_clusters + X, labels = make_blobs(n_samples=N_SAMPLES, centers=centers, cluster_std=0.25, random_state=SEED) + return normalize(X), labels + + +def get_circles(n_clusters): + X, labels = make_circles(n_samples=N_SAMPLES, factor=0.5, noise=0.05, random_state=SEED) + return normalize(X), labels + + +def get_moons(n_clusters): + X, labels = make_moons(n_samples=N_SAMPLES, noise=0.05, random_state=SEED) + return normalize(X), labels + + +def get_noise(n_clusters): + np.random.seed(SEED) + X, labels = np.random.rand(N_SAMPLES, 2), np.random.randint(0, n_clusters, size=(N_SAMPLES,)) + return normalize(X), labels + + +def get_anisotropic(n_clusters): + X, labels = make_blobs(n_samples=N_SAMPLES, centers=n_clusters, random_state=170) + transformation = [[0.6, -0.6], [-0.4, 0.8]] + X = np.dot(X, transformation) + return X, labels + + +def get_varied(n_clusters): + cluster_std = [1.0, 2.5, 0.5, 1.0, 2.5, 0.5, 1.0, 2.5, 0.5, 1.0][:n_clusters] + assert len(cluster_std) == n_clusters + X, labels = make_blobs( + n_samples=N_SAMPLES, centers=n_clusters, cluster_std=cluster_std, random_state=SEED + ) + return normalize(X), labels + + +def get_spiral(n_clusters): + # from 
https://scikit-learn.org/stable/auto_examples/cluster/plot_agglomerative_clustering.html + np.random.seed(SEED) + t = 1.5 * np.pi * (1 + 3 * np.random.rand(1, N_SAMPLES)) + x = t * np.cos(t) + y = t * np.sin(t) + X = np.concatenate((x, y)) + X += 0.7 * np.random.randn(2, N_SAMPLES) + X = np.ascontiguousarray(X.T) + + labels = np.zeros(N_SAMPLES, dtype=int) + return normalize(X), labels + + +DATA_MAPPING = { + 'regular': get_regular, + 'circles': get_circles, + 'moons': get_moons, + 'spiral': get_spiral, + 'noise': get_noise, + 'anisotropic': get_anisotropic, + 'varied': get_varied, +} + + +def get_groundtruth_model(X, labels, n_clusters, **kwargs): + # dummy model to show true label distribution + class Dummy: + def __init__(self, y): + self.labels_ = labels + + return Dummy(labels) + + +def get_kmeans(X, labels, n_clusters, **kwargs): + model = KMeans(init="k-means++", n_clusters=n_clusters, n_init=10, random_state=SEED) + model.set_params(**kwargs) + return model.fit(X) + + +def get_dbscan(X, labels, n_clusters, **kwargs): + model = DBSCAN(eps=0.3) + model.set_params(**kwargs) + return model.fit(X) + + +def get_agglomerative(X, labels, n_clusters, **kwargs): + connectivity = kneighbors_graph( + X, n_neighbors=n_clusters, include_self=False + ) + # make connectivity symmetric + connectivity = 0.5 * (connectivity + connectivity.T) + model = AgglomerativeClustering( + n_clusters=n_clusters, linkage="ward", connectivity=connectivity + ) + model.set_params(**kwargs) + return model.fit(X) + + +def get_meanshift(X, labels, n_clusters, **kwargs): + bandwidth = estimate_bandwidth(X, quantile=0.25) + model = MeanShift(bandwidth=bandwidth, bin_seeding=True) + model.set_params(**kwargs) + return model.fit(X) + + +def get_spectral(X, labels, n_clusters, **kwargs): + model = SpectralClustering( + n_clusters=n_clusters, + eigen_solver="arpack", + affinity="nearest_neighbors", + ) + model.set_params(**kwargs) + return model.fit(X) + + +def get_optics(X, labels, n_clusters, 
**kwargs): + model = OPTICS( + min_samples=7, + xi=0.05, + min_cluster_size=0.1, + ) + model.set_params(**kwargs) + return model.fit(X) + + +def get_birch(X, labels, n_clusters, **kwargs): + model = Birch(n_clusters=n_clusters) + model.set_params(**kwargs) + return model.fit(X) + + +def get_gaussianmixture(X, labels, n_clusters, **kwargs): + model = GaussianMixture( + n_components=n_clusters, covariance_type="full", random_state=SEED, + ) + model.set_params(**kwargs) + return model.fit(X) + + +MODEL_MAPPING = { + 'True labels': get_groundtruth_model, + 'KMeans': get_kmeans, + 'DBSCAN': get_dbscan, + 'MeanShift': get_meanshift, + 'SpectralClustering': get_spectral, + 'OPTICS': get_optics, + 'Birch': get_birch, + 'GaussianMixture': get_gaussianmixture, + 'AgglomerativeClustering': get_agglomerative, +} + + +def plot_clusters(ax, X, labels): + set_clusters = set(labels) + set_clusters.discard(-1) # -1 signifiies outliers, which we plot separately + for label, color in zip(sorted(set_clusters), COLORS): + idx = labels == label + if not sum(idx): + continue + ax.scatter(X[idx, 0], X[idx, 1], color=color) + + # show outliers (if any) + idx = labels == -1 + if sum(idx): + ax.scatter(X[idx, 0], X[idx, 1], c='k', marker='x') + + ax.grid(None) + ax.set_xticks([]) + ax.set_yticks([]) + return ax + + +def cluster(dataset: str, n_clusters: int, clustering_algorithm: str): + if isinstance(n_clusters, dict): + n_clusters = n_clusters['value'] + else: + n_clusters = int(n_clusters) + + X, labels = DATA_MAPPING[dataset](n_clusters) + model = MODEL_MAPPING[clustering_algorithm](X, labels, n_clusters=n_clusters) + if hasattr(model, "labels_"): + y_pred = model.labels_.astype(int) + else: + y_pred = model.predict(X) + + fig, ax = plt.subplots(figsize=FIGSIZE) + + plot_clusters(ax, X, y_pred) + ax.set_title(clustering_algorithm, fontsize=16) + + return fig + + +title = "Clustering with Scikit-learn" +description = ( + "This example shows how different clustering algorithms work. 
Simply pick " + "the dataset and the number of clusters to see how the clustering algorithms work. " + "Colored circles are (predicted) labels and black x are outliers." +) + + +def iter_grid(n_rows, n_cols): + # create a grid using gradio Block + for _ in range(n_rows): + with gr.Row(): + for _ in range(n_cols): + with gr.Column(): + yield + +with gr.Blocks(title=title) as demo: + gr.HTML(f"{title}") + gr.Markdown(description) + + input_models = list(MODEL_MAPPING) + input_data = gr.Radio( + list(DATA_MAPPING), + value="regular", + label="dataset" + ) + input_n_clusters = gr.Slider( + minimum=1, + maximum=MAX_CLUSTERS, + value=4, + step=1, + label='Number of clusters' + ) + n_rows = int(math.ceil(len(input_models) / N_COLS)) + counter = 0 + for _ in iter_grid(n_rows, N_COLS): + if counter >= len(input_models): + break + + input_model = input_models[counter] + plot = gr.Plot(label=input_model) + fn = partial(cluster, clustering_algorithm=input_model) + input_data.change(fn=fn, inputs=[input_data, input_n_clusters], outputs=plot) + input_n_clusters.change(fn=fn, inputs=[input_data, input_n_clusters], outputs=plot) + counter += 1 + +demo.launch() diff --git a/testbed/gradio-app__gradio/demo/digit_classifier/requirements.txt b/testbed/gradio-app__gradio/demo/digit_classifier/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..b3a60b6da25b54d3e1a3696814eed57ef4ef62b7 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/digit_classifier/requirements.txt @@ -0,0 +1 @@ +tensorflow \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/digit_classifier/run.ipynb b/testbed/gradio-app__gradio/demo/digit_classifier/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..a2610e73cb4c5525b97204da76a913db771cc610 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/digit_classifier/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, 
"source": ["# Gradio Demo: digit_classifier"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio tensorflow"]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["from urllib.request import urlretrieve\n", "\n", "import tensorflow as tf\n", "\n", "import gradio as gr\n", "\n", "urlretrieve(\n", " \"https://gr-models.s3-us-west-2.amazonaws.com/mnist-model.h5\", \"mnist-model.h5\"\n", ")\n", "model = tf.keras.models.load_model(\"mnist-model.h5\")\n", "\n", "\n", "def recognize_digit(image):\n", " image = image.reshape(1, -1)\n", " prediction = model.predict(image).tolist()[0]\n", " return {str(i): prediction[i] for i in range(10)}\n", "\n", "\n", "im = gr.Image(shape=(28, 28), image_mode=\"L\", invert_colors=False, source=\"canvas\")\n", "\n", "demo = gr.Interface(\n", " recognize_digit,\n", " im,\n", " gr.Label(num_top_classes=3),\n", " live=True,\n", " interpretation=\"default\",\n", " capture_session=True,\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/digit_classifier/run.py b/testbed/gradio-app__gradio/demo/digit_classifier/run.py new file mode 100644 index 0000000000000000000000000000000000000000..7d38d5bb1e676cf6d12a49105b5a0b9988529b62 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/digit_classifier/run.py @@ -0,0 +1,31 @@ +from urllib.request import urlretrieve + +import tensorflow as tf + +import gradio as gr + +urlretrieve( + "https://gr-models.s3-us-west-2.amazonaws.com/mnist-model.h5", "mnist-model.h5" +) +model = tf.keras.models.load_model("mnist-model.h5") + + +def recognize_digit(image): + image = image.reshape(1, -1) + prediction = model.predict(image).tolist()[0] + return {str(i): prediction[i] for i in 
range(10)} + + +im = gr.Image(shape=(28, 28), image_mode="L", invert_colors=False, source="canvas") + +demo = gr.Interface( + recognize_digit, + im, + gr.Label(num_top_classes=3), + live=True, + interpretation="default", + capture_session=True, +) + +if __name__ == "__main__": + demo.launch() diff --git a/testbed/gradio-app__gradio/demo/gender_sentence_custom_interpretation/run.ipynb b/testbed/gradio-app__gradio/demo/gender_sentence_custom_interpretation/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..35f32fad88ec97b67ace1707e5f4c52727683ee7 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/gender_sentence_custom_interpretation/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: gender_sentence_custom_interpretation"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import re\n", "\n", "import gradio as gr\n", "\n", "male_words, female_words = [\"he\", \"his\", \"him\"], [\"she\", \"hers\", \"her\"]\n", "\n", "\n", "def gender_of_sentence(sentence):\n", " male_count = len([word for word in sentence.split() if word.lower() in male_words])\n", " female_count = len(\n", " [word for word in sentence.split() if word.lower() in female_words]\n", " )\n", " total = max(male_count + female_count, 1)\n", " return {\"male\": male_count / total, \"female\": female_count / total}\n", "\n", "\n", "# Number of arguments to interpretation function must\n", "# match number of inputs to prediction function\n", "def interpret_gender(sentence):\n", " result = gender_of_sentence(sentence)\n", " is_male = result[\"male\"] > result[\"female\"]\n", " interpretation = []\n", " for word in re.split(\"( )\", 
sentence):\n", " score = 0\n", " token = word.lower()\n", " if (is_male and token in male_words) or (not is_male and token in female_words):\n", " score = 1\n", " elif (is_male and token in female_words) or (\n", " not is_male and token in male_words\n", " ):\n", " score = -1\n", " interpretation.append((word, score))\n", " # Output must be a list of lists containing the same number of elements as inputs\n", " # Each element corresponds to the interpretation scores for the given input\n", " return [interpretation]\n", "\n", "\n", "demo = gr.Interface(\n", " fn=gender_of_sentence,\n", " inputs=gr.Textbox(value=\"She went to his house to get her keys.\"),\n", " outputs=\"label\",\n", " interpretation=interpret_gender,\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/gender_sentence_custom_interpretation/run.py b/testbed/gradio-app__gradio/demo/gender_sentence_custom_interpretation/run.py new file mode 100644 index 0000000000000000000000000000000000000000..93a8f6c6cf485f2b22b6a15ef363b4f4fece03d6 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/gender_sentence_custom_interpretation/run.py @@ -0,0 +1,46 @@ +import re + +import gradio as gr + +male_words, female_words = ["he", "his", "him"], ["she", "hers", "her"] + + +def gender_of_sentence(sentence): + male_count = len([word for word in sentence.split() if word.lower() in male_words]) + female_count = len( + [word for word in sentence.split() if word.lower() in female_words] + ) + total = max(male_count + female_count, 1) + return {"male": male_count / total, "female": female_count / total} + + +# Number of arguments to interpretation function must +# match number of inputs to prediction function +def interpret_gender(sentence): + result = gender_of_sentence(sentence) + is_male = result["male"] > result["female"] + interpretation = [] + for word in re.split("( )", 
sentence): + score = 0 + token = word.lower() + if (is_male and token in male_words) or (not is_male and token in female_words): + score = 1 + elif (is_male and token in female_words) or ( + not is_male and token in male_words + ): + score = -1 + interpretation.append((word, score)) + # Output must be a list of lists containing the same number of elements as inputs + # Each element corresponds to the interpretation scores for the given input + return [interpretation] + + +demo = gr.Interface( + fn=gender_of_sentence, + inputs=gr.Textbox(value="She went to his house to get her keys."), + outputs="label", + interpretation=interpret_gender, +) + +if __name__ == "__main__": + demo.launch() diff --git a/testbed/gradio-app__gradio/demo/gender_sentence_default_interpretation/run.ipynb b/testbed/gradio-app__gradio/demo/gender_sentence_default_interpretation/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..3e99c9adb60b3667a4109e8366acd16a45a3c396 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/gender_sentence_default_interpretation/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: gender_sentence_default_interpretation"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "male_words, female_words = [\"he\", \"his\", \"him\"], [\"she\", \"hers\", \"her\"]\n", "\n", "\n", "def gender_of_sentence(sentence):\n", " male_count = len([word for word in sentence.split() if word.lower() in male_words])\n", " female_count = len(\n", " [word for word in sentence.split() if word.lower() in female_words]\n", " )\n", " total = max(male_count + female_count, 1)\n", " return 
{\"male\": male_count / total, \"female\": female_count / total}\n", "\n", "\n", "demo = gr.Interface(\n", " fn=gender_of_sentence,\n", " inputs=gr.Textbox(value=\"She went to his house to get her keys.\"),\n", " outputs=\"label\",\n", " interpretation=\"default\",\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/gender_sentence_default_interpretation/run.py b/testbed/gradio-app__gradio/demo/gender_sentence_default_interpretation/run.py new file mode 100644 index 0000000000000000000000000000000000000000..99312fda6c52f5ff754589cf99a8b7ef031d3f70 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/gender_sentence_default_interpretation/run.py @@ -0,0 +1,23 @@ +import gradio as gr + +male_words, female_words = ["he", "his", "him"], ["she", "hers", "her"] + + +def gender_of_sentence(sentence): + male_count = len([word for word in sentence.split() if word.lower() in male_words]) + female_count = len( + [word for word in sentence.split() if word.lower() in female_words] + ) + total = max(male_count + female_count, 1) + return {"male": male_count / total, "female": female_count / total} + + +demo = gr.Interface( + fn=gender_of_sentence, + inputs=gr.Textbox(value="She went to his house to get her keys."), + outputs="label", + interpretation="default", +) + +if __name__ == "__main__": + demo.launch() diff --git a/testbed/gradio-app__gradio/demo/generate_notebooks.py b/testbed/gradio-app__gradio/demo/generate_notebooks.py new file mode 100644 index 0000000000000000000000000000000000000000..a1644742ec499e9be0ec36ec1a5f1626a774e483 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/generate_notebooks.py @@ -0,0 +1,82 @@ +import nbformat as nbf +import os +import json +import random +import subprocess + +GRADIO_DEMO_DIR = os.getcwd() +DEMOS_TO_SKIP = {"all_demos", "reset_components", "custom_path", "kitchen_sink_random"} + +demos 
= os.listdir(GRADIO_DEMO_DIR) +demos = [demo for demo in demos if demo not in DEMOS_TO_SKIP and os.path.isdir(os.path.join(GRADIO_DEMO_DIR, demo)) and os.path.exists(os.path.join(GRADIO_DEMO_DIR, demo, "run.py"))] + +def git_tracked(demo, file): + osstdout = subprocess.Popen(f"cd {demo} && git ls-files --error-unmatch {file}", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True) + osstdout.wait() + return not osstdout.returncode + +for demo in demos: + nb = nbf.v4.new_notebook() + text = f"# Gradio Demo: {demo}" + + if os.path.exists(os.path.join(GRADIO_DEMO_DIR, demo, "DESCRIPTION.md")): + with open(os.path.join(GRADIO_DEMO_DIR, demo, "DESCRIPTION.md"), "r", encoding="utf8") as f: + description = f.read() + text += f"""\n### {description} + """ + + files = os.listdir(os.path.join(GRADIO_DEMO_DIR, demo)) + skip = ["run.py", "run.ipynb", "setup.sh", ".gitignore", "requirements.txt", "DESCRIPTION.md", "screenshot.png", "screenshot.gif"] + files = [file for file in files if file not in skip if git_tracked(demo, file)] + files.sort() + if files: + get_files = "# Downloading files from the demo repo\nimport os" + for file in files: + if os.path.isdir(os.path.join(GRADIO_DEMO_DIR, demo, file)): + sub_files = os.listdir(os.path.join(GRADIO_DEMO_DIR, demo, file)) + sub_files = [sub for sub in sub_files if sub not in skip if git_tracked(demo, f"{file}/{sub}")] + sub_files.sort() + if sub_files: + get_files += f"\nos.mkdir('{file}')" + for sub_file in sub_files: + get_files += f"\n!wget -q -O {file}/{sub_file} https://github.com/gradio-app/gradio/raw/main/demo/{demo}/{file}/{sub_file}" + else: + get_files += f"\n!wget -q https://github.com/gradio-app/gradio/raw/main/demo/{demo}/{file}" + + requirements = "" + if os.path.exists(os.path.join(GRADIO_DEMO_DIR, demo, "requirements.txt")): + with open(os.path.join(GRADIO_DEMO_DIR, demo, "requirements.txt"), "r", encoding="utf8") as f: + requirements = f.read().split("\n") + 
requirements = " ".join(requirements) + + installs = f"!pip install -q gradio {requirements}" + + with open(os.path.join(GRADIO_DEMO_DIR, demo, "run.py"), "r", encoding="utf8") as f: + code = f.read() + code = code.replace("os.path.dirname(__file__)", "os.path.abspath('')") + + if files: + nb['cells'] = [nbf.v4.new_markdown_cell(text), + nbf.v4.new_code_cell(installs), + nbf.v4.new_code_cell(get_files), + nbf.v4.new_code_cell(code)] + else: + nb['cells'] = [nbf.v4.new_markdown_cell(text), + nbf.v4.new_code_cell(installs), + nbf.v4.new_code_cell(code)] + + output_notebook = os.path.join(GRADIO_DEMO_DIR, demo, "run.ipynb") + + with open(output_notebook, 'w', encoding="utf8") as f: + nbf.write(nb, f) + + with open(output_notebook, "r", encoding="utf8") as f: + content = f.read() + + content = json.loads(content) + for i, cell in enumerate(content["cells"]): + random.seed(i) + cell["id"] = random.getrandbits(128) + + with open(output_notebook, "w", encoding="utf8") as f: + f.write(json.dumps(content)) \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/image_classifier_interpretation/files/imagenet_labels.json b/testbed/gradio-app__gradio/demo/image_classifier_interpretation/files/imagenet_labels.json new file mode 100644 index 0000000000000000000000000000000000000000..fa059ceeab5d18ab9882df2983b92e8270373e39 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/image_classifier_interpretation/files/imagenet_labels.json @@ -0,0 +1,1000 @@ +["tench", + "goldfish", + "great white shark", + "tiger shark", + "hammerhead shark", + "electric ray", + "stingray", + "cock", + "hen", + "ostrich", + "brambling", + "goldfinch", + "house finch", + "junco", + "indigo bunting", + "American robin", + "bulbul", + "jay", + "magpie", + "chickadee", + "American dipper", + "kite", + "bald eagle", + "vulture", + "great grey owl", + "fire salamander", + "smooth newt", + "newt", + "spotted salamander", + "axolotl", + "American bullfrog", + "tree frog", + "tailed frog", + 
"loggerhead sea turtle", + "leatherback sea turtle", + "mud turtle", + "terrapin", + "box turtle", + "banded gecko", + "green iguana", + "Carolina anole", + "desert grassland whiptail lizard", + "agama", + "frilled-necked lizard", + "alligator lizard", + "Gila monster", + "European green lizard", + "chameleon", + "Komodo dragon", + "Nile crocodile", + "American alligator", + "triceratops", + "worm snake", + "ring-necked snake", + "eastern hog-nosed snake", + "smooth green snake", + "kingsnake", + "garter snake", + "water snake", + "vine snake", + "night snake", + "boa constrictor", + "African rock python", + "Indian cobra", + "green mamba", + "sea snake", + "Saharan horned viper", + "eastern diamondback rattlesnake", + "sidewinder", + "trilobite", + "harvestman", + "scorpion", + "yellow garden spider", + "barn spider", + "European garden spider", + "southern black widow", + "tarantula", + "wolf spider", + "tick", + "centipede", + "black grouse", + "ptarmigan", + "ruffed grouse", + "prairie grouse", + "peacock", + "quail", + "partridge", + "grey parrot", + "macaw", + "sulphur-crested cockatoo", + "lorikeet", + "coucal", + "bee eater", + "hornbill", + "hummingbird", + "jacamar", + "toucan", + "duck", + "red-breasted merganser", + "goose", + "black swan", + "tusker", + "echidna", + "platypus", + "wallaby", + "koala", + "wombat", + "jellyfish", + "sea anemone", + "brain coral", + "flatworm", + "nematode", + "conch", + "snail", + "slug", + "sea slug", + "chiton", + "chambered nautilus", + "Dungeness crab", + "rock crab", + "fiddler crab", + "red king crab", + "American lobster", + "spiny lobster", + "crayfish", + "hermit crab", + "isopod", + "white stork", + "black stork", + "spoonbill", + "flamingo", + "little blue heron", + "great egret", + "bittern", + "crane (bird)", + "limpkin", + "common gallinule", + "American coot", + "bustard", + "ruddy turnstone", + "dunlin", + "common redshank", + "dowitcher", + "oystercatcher", + "pelican", + "king penguin", + "albatross", + 
"grey whale", + "killer whale", + "dugong", + "sea lion", + "Chihuahua", + "Japanese Chin", + "Maltese", + "Pekingese", + "Shih Tzu", + "King Charles Spaniel", + "Papillon", + "toy terrier", + "Rhodesian Ridgeback", + "Afghan Hound", + "Basset Hound", + "Beagle", + "Bloodhound", + "Bluetick Coonhound", + "Black and Tan Coonhound", + "Treeing Walker Coonhound", + "English foxhound", + "Redbone Coonhound", + "borzoi", + "Irish Wolfhound", + "Italian Greyhound", + "Whippet", + "Ibizan Hound", + "Norwegian Elkhound", + "Otterhound", + "Saluki", + "Scottish Deerhound", + "Weimaraner", + "Staffordshire Bull Terrier", + "American Staffordshire Terrier", + "Bedlington Terrier", + "Border Terrier", + "Kerry Blue Terrier", + "Irish Terrier", + "Norfolk Terrier", + "Norwich Terrier", + "Yorkshire Terrier", + "Wire Fox Terrier", + "Lakeland Terrier", + "Sealyham Terrier", + "Airedale Terrier", + "Cairn Terrier", + "Australian Terrier", + "Dandie Dinmont Terrier", + "Boston Terrier", + "Miniature Schnauzer", + "Giant Schnauzer", + "Standard Schnauzer", + "Scottish Terrier", + "Tibetan Terrier", + "Australian Silky Terrier", + "Soft-coated Wheaten Terrier", + "West Highland White Terrier", + "Lhasa Apso", + "Flat-Coated Retriever", + "Curly-coated Retriever", + "Golden Retriever", + "Labrador Retriever", + "Chesapeake Bay Retriever", + "German Shorthaired Pointer", + "Vizsla", + "English Setter", + "Irish Setter", + "Gordon Setter", + "Brittany", + "Clumber Spaniel", + "English Springer Spaniel", + "Welsh Springer Spaniel", + "Cocker Spaniels", + "Sussex Spaniel", + "Irish Water Spaniel", + "Kuvasz", + "Schipperke", + "Groenendael", + "Malinois", + "Briard", + "Australian Kelpie", + "Komondor", + "Old English Sheepdog", + "Shetland Sheepdog", + "collie", + "Border Collie", + "Bouvier des Flandres", + "Rottweiler", + "German Shepherd Dog", + "Dobermann", + "Miniature Pinscher", + "Greater Swiss Mountain Dog", + "Bernese Mountain Dog", + "Appenzeller Sennenhund", + "Entlebucher 
Sennenhund", + "Boxer", + "Bullmastiff", + "Tibetan Mastiff", + "French Bulldog", + "Great Dane", + "St. Bernard", + "husky", + "Alaskan Malamute", + "Siberian Husky", + "Dalmatian", + "Affenpinscher", + "Basenji", + "pug", + "Leonberger", + "Newfoundland", + "Pyrenean Mountain Dog", + "Samoyed", + "Pomeranian", + "Chow Chow", + "Keeshond", + "Griffon Bruxellois", + "Pembroke Welsh Corgi", + "Cardigan Welsh Corgi", + "Toy Poodle", + "Miniature Poodle", + "Standard Poodle", + "Mexican hairless dog", + "grey wolf", + "Alaskan tundra wolf", + "red wolf", + "coyote", + "dingo", + "dhole", + "African wild dog", + "hyena", + "red fox", + "kit fox", + "Arctic fox", + "grey fox", + "tabby cat", + "tiger cat", + "Persian cat", + "Siamese cat", + "Egyptian Mau", + "cougar", + "lynx", + "leopard", + "snow leopard", + "jaguar", + "lion", + "tiger", + "cheetah", + "brown bear", + "American black bear", + "polar bear", + "sloth bear", + "mongoose", + "meerkat", + "tiger beetle", + "ladybug", + "ground beetle", + "longhorn beetle", + "leaf beetle", + "dung beetle", + "rhinoceros beetle", + "weevil", + "fly", + "bee", + "ant", + "grasshopper", + "cricket", + "stick insect", + "cockroach", + "mantis", + "cicada", + "leafhopper", + "lacewing", + "dragonfly", + "damselfly", + "red admiral", + "ringlet", + "monarch butterfly", + "small white", + "sulphur butterfly", + "gossamer-winged butterfly", + "starfish", + "sea urchin", + "sea cucumber", + "cottontail rabbit", + "hare", + "Angora rabbit", + "hamster", + "porcupine", + "fox squirrel", + "marmot", + "beaver", + "guinea pig", + "common sorrel", + "zebra", + "pig", + "wild boar", + "warthog", + "hippopotamus", + "ox", + "water buffalo", + "bison", + "ram", + "bighorn sheep", + "Alpine ibex", + "hartebeest", + "impala", + "gazelle", + "dromedary", + "llama", + "weasel", + "mink", + "European polecat", + "black-footed ferret", + "otter", + "skunk", + "badger", + "armadillo", + "three-toed sloth", + "orangutan", + "gorilla", + 
"chimpanzee", + "gibbon", + "siamang", + "guenon", + "patas monkey", + "baboon", + "macaque", + "langur", + "black-and-white colobus", + "proboscis monkey", + "marmoset", + "white-headed capuchin", + "howler monkey", + "titi", + "Geoffroy's spider monkey", + "common squirrel monkey", + "ring-tailed lemur", + "indri", + "Asian elephant", + "African bush elephant", + "red panda", + "giant panda", + "snoek", + "eel", + "coho salmon", + "rock beauty", + "clownfish", + "sturgeon", + "garfish", + "lionfish", + "pufferfish", + "abacus", + "abaya", + "academic gown", + "accordion", + "acoustic guitar", + "aircraft carrier", + "airliner", + "airship", + "altar", + "ambulance", + "amphibious vehicle", + "analog clock", + "apiary", + "apron", + "waste container", + "assault rifle", + "backpack", + "bakery", + "balance beam", + "balloon", + "ballpoint pen", + "Band-Aid", + "banjo", + "baluster", + "barbell", + "barber chair", + "barbershop", + "barn", + "barometer", + "barrel", + "wheelbarrow", + "baseball", + "basketball", + "bassinet", + "bassoon", + "swimming cap", + "bath towel", + "bathtub", + "station wagon", + "lighthouse", + "beaker", + "military cap", + "beer bottle", + "beer glass", + "bell-cot", + "bib", + "tandem bicycle", + "bikini", + "ring binder", + "binoculars", + "birdhouse", + "boathouse", + "bobsleigh", + "bolo tie", + "poke bonnet", + "bookcase", + "bookstore", + "bottle cap", + "bow", + "bow tie", + "brass", + "bra", + "breakwater", + "breastplate", + "broom", + "bucket", + "buckle", + "bulletproof vest", + "high-speed train", + "butcher shop", + "taxicab", + "cauldron", + "candle", + "cannon", + "canoe", + "can opener", + "cardigan", + "car mirror", + "carousel", + "tool kit", + "carton", + "car wheel", + "automated teller machine", + "cassette", + "cassette player", + "castle", + "catamaran", + "CD player", + "cello", + "mobile phone", + "chain", + "chain-link fence", + "chain mail", + "chainsaw", + "chest", + "chiffonier", + "chime", + "china cabinet", 
+ "Christmas stocking", + "church", + "movie theater", + "cleaver", + "cliff dwelling", + "cloak", + "clogs", + "cocktail shaker", + "coffee mug", + "coffeemaker", + "coil", + "combination lock", + "computer keyboard", + "confectionery store", + "container ship", + "convertible", + "corkscrew", + "cornet", + "cowboy boot", + "cowboy hat", + "cradle", + "crane (machine)", + "crash helmet", + "crate", + "infant bed", + "Crock Pot", + "croquet ball", + "crutch", + "cuirass", + "dam", + "desk", + "desktop computer", + "rotary dial telephone", + "diaper", + "digital clock", + "digital watch", + "dining table", + "dishcloth", + "dishwasher", + "disc brake", + "dock", + "dog sled", + "dome", + "doormat", + "drilling rig", + "drum", + "drumstick", + "dumbbell", + "Dutch oven", + "electric fan", + "electric guitar", + "electric locomotive", + "entertainment center", + "envelope", + "espresso machine", + "face powder", + "feather boa", + "filing cabinet", + "fireboat", + "fire engine", + "fire screen sheet", + "flagpole", + "flute", + "folding chair", + "football helmet", + "forklift", + "fountain", + "fountain pen", + "four-poster bed", + "freight car", + "French horn", + "frying pan", + "fur coat", + "garbage truck", + "gas mask", + "gas pump", + "goblet", + "go-kart", + "golf ball", + "golf cart", + "gondola", + "gong", + "gown", + "grand piano", + "greenhouse", + "grille", + "grocery store", + "guillotine", + "barrette", + "hair spray", + "half-track", + "hammer", + "hamper", + "hair dryer", + "hand-held computer", + "handkerchief", + "hard disk drive", + "harmonica", + "harp", + "harvester", + "hatchet", + "holster", + "home theater", + "honeycomb", + "hook", + "hoop skirt", + "horizontal bar", + "horse-drawn vehicle", + "hourglass", + "iPod", + "clothes iron", + "jack-o'-lantern", + "jeans", + "jeep", + "T-shirt", + "jigsaw puzzle", + "pulled rickshaw", + "joystick", + "kimono", + "knee pad", + "knot", + "lab coat", + "ladle", + "lampshade", + "laptop computer", + 
"lawn mower", + "lens cap", + "paper knife", + "library", + "lifeboat", + "lighter", + "limousine", + "ocean liner", + "lipstick", + "slip-on shoe", + "lotion", + "speaker", + "loupe", + "sawmill", + "magnetic compass", + "mail bag", + "mailbox", + "tights", + "tank suit", + "manhole cover", + "maraca", + "marimba", + "mask", + "match", + "maypole", + "maze", + "measuring cup", + "medicine chest", + "megalith", + "microphone", + "microwave oven", + "military uniform", + "milk can", + "minibus", + "miniskirt", + "minivan", + "missile", + "mitten", + "mixing bowl", + "mobile home", + "Model T", + "modem", + "monastery", + "monitor", + "moped", + "mortar", + "square academic cap", + "mosque", + "mosquito net", + "scooter", + "mountain bike", + "tent", + "computer mouse", + "mousetrap", + "moving van", + "muzzle", + "nail", + "neck brace", + "necklace", + "nipple", + "notebook computer", + "obelisk", + "oboe", + "ocarina", + "odometer", + "oil filter", + "organ", + "oscilloscope", + "overskirt", + "bullock cart", + "oxygen mask", + "packet", + "paddle", + "paddle wheel", + "padlock", + "paintbrush", + "pajamas", + "palace", + "pan flute", + "paper towel", + "parachute", + "parallel bars", + "park bench", + "parking meter", + "passenger car", + "patio", + "payphone", + "pedestal", + "pencil case", + "pencil sharpener", + "perfume", + "Petri dish", + "photocopier", + "plectrum", + "Pickelhaube", + "picket fence", + "pickup truck", + "pier", + "piggy bank", + "pill bottle", + "pillow", + "ping-pong ball", + "pinwheel", + "pirate ship", + "pitcher", + "hand plane", + "planetarium", + "plastic bag", + "plate rack", + "plow", + "plunger", + "Polaroid camera", + "pole", + "police van", + "poncho", + "billiard table", + "soda bottle", + "pot", + "potter's wheel", + "power drill", + "prayer rug", + "printer", + "prison", + "projectile", + "projector", + "hockey puck", + "punching bag", + "purse", + "quill", + "quilt", + "race car", + "racket", + "radiator", + "radio", + "radio 
telescope", + "rain barrel", + "recreational vehicle", + "reel", + "reflex camera", + "refrigerator", + "remote control", + "restaurant", + "revolver", + "rifle", + "rocking chair", + "rotisserie", + "eraser", + "rugby ball", + "ruler", + "running shoe", + "safe", + "safety pin", + "salt shaker", + "sandal", + "sarong", + "saxophone", + "scabbard", + "weighing scale", + "school bus", + "schooner", + "scoreboard", + "CRT screen", + "screw", + "screwdriver", + "seat belt", + "sewing machine", + "shield", + "shoe store", + "shoji", + "shopping basket", + "shopping cart", + "shovel", + "shower cap", + "shower curtain", + "ski", + "ski mask", + "sleeping bag", + "slide rule", + "sliding door", + "slot machine", + "snorkel", + "snowmobile", + "snowplow", + "soap dispenser", + "soccer ball", + "sock", + "solar thermal collector", + "sombrero", + "soup bowl", + "space bar", + "space heater", + "space shuttle", + "spatula", + "motorboat", + "spider web", + "spindle", + "sports car", + "spotlight", + "stage", + "steam locomotive", + "through arch bridge", + "steel drum", + "stethoscope", + "scarf", + "stone wall", + "stopwatch", + "stove", + "strainer", + "tram", + "stretcher", + "couch", + "stupa", + "submarine", + "suit", + "sundial", + "sunglass", + "sunglasses", + "sunscreen", + "suspension bridge", + "mop", + "sweatshirt", + "swimsuit", + "swing", + "switch", + "syringe", + "table lamp", + "tank", + "tape player", + "teapot", + "teddy bear", + "television", + "tennis ball", + "thatched roof", + "front curtain", + "thimble", + "threshing machine", + "throne", + "tile roof", + "toaster", + "tobacco shop", + "toilet seat", + "torch", + "totem pole", + "tow truck", + "toy store", + "tractor", + "semi-trailer truck", + "tray", + "trench coat", + "tricycle", + "trimaran", + "tripod", + "triumphal arch", + "trolleybus", + "trombone", + "tub", + "turnstile", + "typewriter keyboard", + "umbrella", + "unicycle", + "upright piano", + "vacuum cleaner", + "vase", + "vault", + 
"velvet", + "vending machine", + "vestment", + "viaduct", + "violin", + "volleyball", + "waffle iron", + "wall clock", + "wallet", + "wardrobe", + "military aircraft", + "sink", + "washing machine", + "water bottle", + "water jug", + "water tower", + "whiskey jug", + "whistle", + "wig", + "window screen", + "window shade", + "Windsor tie", + "wine bottle", + "wing", + "wok", + "wooden spoon", + "wool", + "split-rail fence", + "shipwreck", + "yawl", + "yurt", + "website", + "comic book", + "crossword", + "traffic sign", + "traffic light", + "dust jacket", + "menu", + "plate", + "guacamole", + "consomme", + "hot pot", + "trifle", + "ice cream", + "ice pop", + "baguette", + "bagel", + "pretzel", + "cheeseburger", + "hot dog", + "mashed potato", + "cabbage", + "broccoli", + "cauliflower", + "zucchini", + "spaghetti squash", + "acorn squash", + "butternut squash", + "cucumber", + "artichoke", + "bell pepper", + "cardoon", + "mushroom", + "Granny Smith", + "strawberry", + "orange", + "lemon", + "fig", + "pineapple", + "banana", + "jackfruit", + "custard apple", + "pomegranate", + "hay", + "carbonara", + "chocolate syrup", + "dough", + "meatloaf", + "pizza", + "pot pie", + "burrito", + "red wine", + "espresso", + "cup", + "eggnog", + "alp", + "bubble", + "cliff", + "coral reef", + "geyser", + "lakeshore", + "promontory", + "shoal", + "seashore", + "valley", + "volcano", + "baseball player", + "bridegroom", + "scuba diver", + "rapeseed", + "daisy", + "yellow lady's slipper", + "corn", + "acorn", + "rose hip", + "horse chestnut seed", + "coral fungus", + "agaric", + "gyromitra", + "stinkhorn mushroom", + "earth star", + "hen-of-the-woods", + "bolete", + "ear", + "toilet paper"] \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/image_classifier_interpretation/requirements.txt b/testbed/gradio-app__gradio/demo/image_classifier_interpretation/requirements.txt new file mode 100644 index 
0000000000000000000000000000000000000000..c2bbfa1f2b3c72184f27937009c87c54c826806c --- /dev/null +++ b/testbed/gradio-app__gradio/demo/image_classifier_interpretation/requirements.txt @@ -0,0 +1,2 @@ +numpy +tensorflow \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/image_classifier_interpretation/run.ipynb b/testbed/gradio-app__gradio/demo/image_classifier_interpretation/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..92fcfcd7142ea5f7de3ffb7f58ec8e3739f7abd3 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/image_classifier_interpretation/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: image_classifier_interpretation"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio numpy tensorflow"]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "os.mkdir('files')\n", "!wget -q -O files/imagenet_labels.json https://github.com/gradio-app/gradio/raw/main/demo/image_classifier_interpretation/files/imagenet_labels.json\n", "os.mkdir('images')\n", "!wget -q -O images/cheetah1.jpg https://github.com/gradio-app/gradio/raw/main/demo/image_classifier_interpretation/images/cheetah1.jpg\n", "!wget -q -O images/lion.jpg https://github.com/gradio-app/gradio/raw/main/demo/image_classifier_interpretation/images/lion.jpg"]}, {"cell_type": "code", "execution_count": null, "id": 44380577570523278879349135829904343037, "metadata": {}, "outputs": [], "source": ["import requests\n", "import tensorflow as tf\n", "\n", "import gradio as gr\n", "\n", "inception_net = tf.keras.applications.MobileNetV2() # load the model\n", "\n", "# Download human-readable labels for ImageNet.\n", 
"response = requests.get(\"https://git.io/JJkYN\")\n", "labels = response.text.split(\"\\n\")\n", "\n", "\n", "def classify_image(inp):\n", " inp = inp.reshape((-1, 224, 224, 3))\n", " inp = tf.keras.applications.mobilenet_v2.preprocess_input(inp)\n", " prediction = inception_net.predict(inp).flatten()\n", " return {labels[i]: float(prediction[i]) for i in range(1000)}\n", "\n", "\n", "image = gr.Image(shape=(224, 224))\n", "label = gr.Label(num_top_classes=3)\n", "\n", "demo = gr.Interface(\n", " fn=classify_image, inputs=image, outputs=label, interpretation=\"default\"\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/image_classifier_interpretation/run.py b/testbed/gradio-app__gradio/demo/image_classifier_interpretation/run.py new file mode 100644 index 0000000000000000000000000000000000000000..f052b3ec80ace747429aa99e3a8e3aa37e465121 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/image_classifier_interpretation/run.py @@ -0,0 +1,28 @@ +import requests +import tensorflow as tf + +import gradio as gr + +inception_net = tf.keras.applications.MobileNetV2() # load the model + +# Download human-readable labels for ImageNet. 
+response = requests.get("https://git.io/JJkYN") +labels = response.text.split("\n") + + +def classify_image(inp): + inp = inp.reshape((-1, 224, 224, 3)) + inp = tf.keras.applications.mobilenet_v2.preprocess_input(inp) + prediction = inception_net.predict(inp).flatten() + return {labels[i]: float(prediction[i]) for i in range(1000)} + + +image = gr.Image(shape=(224, 224)) +label = gr.Label(num_top_classes=3) + +demo = gr.Interface( + fn=classify_image, inputs=image, outputs=label, interpretation="default" +) + +if __name__ == "__main__": + demo.launch() diff --git a/testbed/gradio-app__gradio/demo/interface_parallel/run.ipynb b/testbed/gradio-app__gradio/demo/interface_parallel/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..b74d27f11741d1b00bfdbb23dc51e50567a9f630 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interface_parallel/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: interface_parallel"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "greeter_1 = gr.Interface(lambda name: f\"Hello {name}!\", inputs=\"textbox\", outputs=gr.Textbox(label=\"Greeter 1\"))\n", "greeter_2 = gr.Interface(lambda name: f\"Greetings {name}!\", inputs=\"textbox\", outputs=gr.Textbox(label=\"Greeter 2\"))\n", "demo = gr.Parallel(greeter_1, greeter_2)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interface_parallel/run.py b/testbed/gradio-app__gradio/demo/interface_parallel/run.py new file mode 100644 index 
0000000000000000000000000000000000000000..8ac946bd166cc889554294c8ea4a0d74bcb240bf --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interface_parallel/run.py @@ -0,0 +1,8 @@ +import gradio as gr + +greeter_1 = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 1")) +greeter_2 = gr.Interface(lambda name: f"Greetings {name}!", inputs="textbox", outputs=gr.Textbox(label="Greeter 2")) +demo = gr.Parallel(greeter_1, greeter_2) + +if __name__ == "__main__": + demo.launch() \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interface_parallel_load/run.ipynb b/testbed/gradio-app__gradio/demo/interface_parallel_load/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..08bd42d7dec5585fdfcdce25f0430c7c0f84be80 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interface_parallel_load/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: interface_parallel_load"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "generator1 = gr.load(\"huggingface/gpt2\")\n", "generator2 = gr.load(\"huggingface/gpt2-xl\")\n", "\n", "demo = gr.Parallel(generator1, generator2)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interface_parallel_load/run.py b/testbed/gradio-app__gradio/demo/interface_parallel_load/run.py new file mode 100644 index 0000000000000000000000000000000000000000..5adb5b8322c1698fe6dfe6460902585827bf0647 --- /dev/null +++ 
b/testbed/gradio-app__gradio/demo/interface_parallel_load/run.py @@ -0,0 +1,9 @@ +import gradio as gr + +generator1 = gr.load("huggingface/gpt2") +generator2 = gr.load("huggingface/gpt2-xl") + +demo = gr.Parallel(generator1, generator2) + +if __name__ == "__main__": + demo.launch() \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interface_series/run.ipynb b/testbed/gradio-app__gradio/demo/interface_series/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..638bb79eea22cc9dbb2bcf9c25af470b47e72880 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interface_series/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: interface_series"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "get_name = gr.Interface(lambda name: name, inputs=\"textbox\", outputs=\"textbox\")\n", "prepend_hello = gr.Interface(lambda name: f\"Hello {name}!\", inputs=\"textbox\", outputs=\"textbox\")\n", "append_nice = gr.Interface(lambda greeting: f\"{greeting} Nice to meet you!\",\n", " inputs=\"textbox\", outputs=gr.Textbox(label=\"Greeting\"))\n", "demo = gr.Series(get_name, prepend_hello, append_nice)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interface_series/run.py b/testbed/gradio-app__gradio/demo/interface_series/run.py new file mode 100644 index 0000000000000000000000000000000000000000..ac942ff94b23e605f75948797f796208e11e3a9a --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interface_series/run.py @@ -0,0 
+1,10 @@ +import gradio as gr + +get_name = gr.Interface(lambda name: name, inputs="textbox", outputs="textbox") +prepend_hello = gr.Interface(lambda name: f"Hello {name}!", inputs="textbox", outputs="textbox") +append_nice = gr.Interface(lambda greeting: f"{greeting} Nice to meet you!", + inputs="textbox", outputs=gr.Textbox(label="Greeting")) +demo = gr.Series(get_name, prepend_hello, append_nice) + +if __name__ == "__main__": + demo.launch() \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interface_series_load/run.ipynb b/testbed/gradio-app__gradio/demo/interface_series_load/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..601225736450d65f45ab1e3d6c688747cd59857c --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interface_series_load/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: interface_series_load"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "generator = gr.load(\"huggingface/gpt2\")\n", "translator = gr.load(\"huggingface/t5-small\")\n", "\n", "demo = gr.Series(generator, translator, description=\"This demo combines two Spaces: a text generator (`huggingface/gpt2`) and a text translator (`huggingface/t5-small`). The first Space takes a prompt as input and generates a text. 
The second Space takes the generated text as input and translates it into another language.\")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interface_series_load/run.py b/testbed/gradio-app__gradio/demo/interface_series_load/run.py new file mode 100644 index 0000000000000000000000000000000000000000..13703ccadbf81719f49e11a8095afd8b1fc6b38d --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interface_series_load/run.py @@ -0,0 +1,9 @@ +import gradio as gr + +generator = gr.load("huggingface/gpt2") +translator = gr.load("huggingface/t5-small") + +demo = gr.Series(generator, translator, description="This demo combines two Spaces: a text generator (`huggingface/gpt2`) and a text translator (`huggingface/t5-small`). The first Space takes a prompt as input and generates a text. The second Space takes the generated text as input and translates it into another language.") + +if __name__ == "__main__": + demo.launch() \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interpretation_component/requirements.txt b/testbed/gradio-app__gradio/demo/interpretation_component/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..bb3a20d8cf4aba8894a3046ad20f106d7541b711 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interpretation_component/requirements.txt @@ -0,0 +1,3 @@ +shap +transformers +torch \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interpretation_component/run.ipynb b/testbed/gradio-app__gradio/demo/interpretation_component/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..3ffe8c4a11de80bd768c2c896443e789f39da079 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interpretation_component/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, 
"source": ["# Gradio Demo: interpretation_component"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio shap transformers torch"]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import shap\n", "from transformers import pipeline\n", "\n", "\n", "sentiment_classifier = pipeline(\"text-classification\", return_all_scores=True)\n", "\n", "def interpretation_function(text):\n", " explainer = shap.Explainer(sentiment_classifier)\n", " shap_values = explainer([text])\n", " scores = list(zip(shap_values.data[0], shap_values.values[0, :, 1]))\n", " return {\"original\": text, \"interpretation\": scores}\n", "\n", "with gr.Blocks() as demo:\n", " with gr.Row():\n", " with gr.Column():\n", " input_text = gr.Textbox(label=\"Sentiment Analysis\", value=\"Wonderfully terrible\")\n", " with gr.Row():\n", " interpret = gr.Button(\"Interpret\")\n", " with gr.Column():\n", " interpretation = gr.components.Interpretation(input_text)\n", "\n", " interpret.click(interpretation_function, input_text, interpretation)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/interpretation_component/run.py b/testbed/gradio-app__gradio/demo/interpretation_component/run.py new file mode 100644 index 0000000000000000000000000000000000000000..53ecfc5c5a35b8d1bd6f025dcc772dbdd4f9a627 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/interpretation_component/run.py @@ -0,0 +1,26 @@ +import gradio as gr +import shap +from transformers import pipeline + + +sentiment_classifier = pipeline("text-classification", return_all_scores=True) + +def interpretation_function(text): + explainer = shap.Explainer(sentiment_classifier) + shap_values = 
explainer([text]) + scores = list(zip(shap_values.data[0], shap_values.values[0, :, 1])) + return {"original": text, "interpretation": scores} + +with gr.Blocks() as demo: + with gr.Row(): + with gr.Column(): + input_text = gr.Textbox(label="Sentiment Analysis", value="Wonderfully terrible") + with gr.Row(): + interpret = gr.Button("Interpret") + with gr.Column(): + interpretation = gr.components.Interpretation(input_text) + + interpret.click(interpretation_function, input_text, interpretation) + +if __name__ == "__main__": + demo.launch() \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/leaderboard/DESCRIPTION.md b/testbed/gradio-app__gradio/demo/leaderboard/DESCRIPTION.md new file mode 100644 index 0000000000000000000000000000000000000000..39267b584fec09a88b94170039959ec3ea2f3f58 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/leaderboard/DESCRIPTION.md @@ -0,0 +1 @@ +A simple dashboard ranking spaces by number of likes. \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/leaderboard/run.ipynb b/testbed/gradio-app__gradio/demo/leaderboard/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..b368932998ecf19964e350674cdad31c8849ff1b --- /dev/null +++ b/testbed/gradio-app__gradio/demo/leaderboard/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: leaderboard\n", "### A simple dashboard ranking spaces by number of likes.\n", " "]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import requests\n", "import pandas as pd\n", "from huggingface_hub.hf_api import SpaceInfo\n", "path = 
f\"https://huggingface.co/api/spaces\"\n", "\n", "\n", "def get_blocks_party_spaces():\n", " r = requests.get(path)\n", " d = r.json()\n", " spaces = [SpaceInfo(**x) for x in d]\n", " blocks_spaces = {}\n", " for i in range(0,len(spaces)):\n", " if spaces[i].id.split('/')[0] == 'Gradio-Blocks' and hasattr(spaces[i], 'likes') and spaces[i].id != 'Gradio-Blocks/Leaderboard' and spaces[i].id != 'Gradio-Blocks/README':\n", " blocks_spaces[spaces[i].id]=spaces[i].likes\n", " df = pd.DataFrame(\n", " [{\"Spaces_Name\": Spaces, \"likes\": likes} for Spaces,likes in blocks_spaces.items()])\n", " df = df.sort_values(by=['likes'],ascending=False)\n", " return df\n", "\n", "block = gr.Blocks()\n", "\n", "with block: \n", " gr.Markdown(\"\"\"Leaderboard for the most popular Blocks Event Spaces. To learn more and join, see Blocks Party Event\"\"\")\n", " with gr.Tabs():\n", " with gr.TabItem(\"Blocks Party Leaderboard\"):\n", " with gr.Row():\n", " data = gr.outputs.Dataframe(type=\"pandas\")\n", " with gr.Row():\n", " data_run = gr.Button(\"Refresh\")\n", " data_run.click(get_blocks_party_spaces, inputs=None, outputs=data)\n", " # running the function on page load in addition to when the button is clicked\n", " block.load(get_blocks_party_spaces, inputs=None, outputs=data) \n", "\n", "block.launch()\n", "\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/leaderboard/run.py b/testbed/gradio-app__gradio/demo/leaderboard/run.py new file mode 100644 index 0000000000000000000000000000000000000000..48e26218c99fd45df1524e6869c3381d8a14773c --- /dev/null +++ b/testbed/gradio-app__gradio/demo/leaderboard/run.py @@ -0,0 +1,36 @@ +import gradio as gr +import requests +import pandas as pd +from huggingface_hub.hf_api import SpaceInfo +path = f"https://huggingface.co/api/spaces" + + +def get_blocks_party_spaces(): + r = requests.get(path) + d = r.json() + spaces = [SpaceInfo(**x) for x in d] + blocks_spaces = 
{} + for i in range(0,len(spaces)): + if spaces[i].id.split('/')[0] == 'Gradio-Blocks' and hasattr(spaces[i], 'likes') and spaces[i].id != 'Gradio-Blocks/Leaderboard' and spaces[i].id != 'Gradio-Blocks/README': + blocks_spaces[spaces[i].id]=spaces[i].likes + df = pd.DataFrame( + [{"Spaces_Name": Spaces, "likes": likes} for Spaces,likes in blocks_spaces.items()]) + df = df.sort_values(by=['likes'],ascending=False) + return df + +block = gr.Blocks() + +with block: + gr.Markdown("""Leaderboard for the most popular Blocks Event Spaces. To learn more and join, see Blocks Party Event""") + with gr.Tabs(): + with gr.TabItem("Blocks Party Leaderboard"): + with gr.Row(): + data = gr.outputs.Dataframe(type="pandas") + with gr.Row(): + data_run = gr.Button("Refresh") + data_run.click(get_blocks_party_spaces, inputs=None, outputs=data) + # running the function on page load in addition to when the button is clicked + block.load(get_blocks_party_spaces, inputs=None, outputs=data) + +block.launch() + diff --git a/testbed/gradio-app__gradio/demo/logoutbutton_component/requirements.txt b/testbed/gradio-app__gradio/demo/logoutbutton_component/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..f7359a07d4b7db60977b67d788f94a41c00c0ccb --- /dev/null +++ b/testbed/gradio-app__gradio/demo/logoutbutton_component/requirements.txt @@ -0,0 +1 @@ +gradio[oauth] \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/logoutbutton_component/run.ipynb b/testbed/gradio-app__gradio/demo/logoutbutton_component/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..e42b7e828358d2d9d61ded2c58aa67315f916b5c --- /dev/null +++ b/testbed/gradio-app__gradio/demo/logoutbutton_component/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: logoutbutton_component"]}, {"cell_type": "code", "execution_count": null, "id": 
272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio gradio[oauth]"]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr \n", "\n", "with gr.Blocks() as demo:\n", " gr.LogoutButton()\n", "\n", "demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/logoutbutton_component/run.py b/testbed/gradio-app__gradio/demo/logoutbutton_component/run.py new file mode 100644 index 0000000000000000000000000000000000000000..05e04c6e8d3615ff67d5c2a7854c547442f3d6a5 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/logoutbutton_component/run.py @@ -0,0 +1,6 @@ +import gradio as gr + +with gr.Blocks() as demo: + gr.LogoutButton() + +demo.launch() \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/pictionary/class_names.txt b/testbed/gradio-app__gradio/demo/pictionary/class_names.txt new file mode 100644 index 0000000000000000000000000000000000000000..52a6caf89fcdb903620bccd0ad0f50b6d9ed6de4 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/pictionary/class_names.txt @@ -0,0 +1,100 @@ +airplane +alarm_clock +anvil +apple +axe +baseball +baseball_bat +basketball +beard +bed +bench +bicycle +bird +book +bread +bridge +broom +butterfly +camera +candle +car +cat +ceiling_fan +cell_phone +chair +circle +clock +cloud +coffee_cup +cookie +cup +diving_board +donut +door +drums +dumbbell +envelope +eye +eyeglasses +face +fan +flower +frying_pan +grapes +hammer +hat +headphones +helmet +hot_dog +ice_cream +key +knife +ladder +laptop +light_bulb +lightning +line +lollipop +microphone +moon +mountain +moustache +mushroom +pants +paper_clip +pencil +pillow +pizza +power_outlet +radio +rainbow +rifle +saw +scissors +screwdriver +shorts +shovel +smiley_face +snake +sock +spider +spoon +square +star +stop_sign +suitcase +sun 
+sword +syringe +t-shirt +table +tennis_racquet +tent +tooth +traffic_light +tree +triangle +umbrella +wheel +wristwatch diff --git a/testbed/gradio-app__gradio/demo/pictionary/requirements.txt b/testbed/gradio-app__gradio/demo/pictionary/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..ed9a63afad7f8e3282493f6e2c2cadb74280f99c --- /dev/null +++ b/testbed/gradio-app__gradio/demo/pictionary/requirements.txt @@ -0,0 +1,3 @@ +torch +gdown +numpy \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/pictionary/run.ipynb b/testbed/gradio-app__gradio/demo/pictionary/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..a3200f294fba795499516eda3a6c087dcce0db05 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/pictionary/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: pictionary"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio torch gdown numpy"]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/pictionary/class_names.txt"]}, {"cell_type": "code", "execution_count": null, "id": 44380577570523278879349135829904343037, "metadata": {}, "outputs": [], "source": ["from pathlib import Path\n", "\n", "import numpy as np\n", "import torch\n", "import gradio as gr\n", "from torch import nn\n", "import gdown \n", "\n", "url = 'https://drive.google.com/uc?id=1dsk2JNZLRDjC-0J4wIQX_FcVurPaXaAZ'\n", "output = 'pytorch_model.bin'\n", "gdown.download(url, output, quiet=False)\n", "\n", "LABELS = Path('class_names.txt').read_text().splitlines()\n", "\n", "model = nn.Sequential(\n", " 
nn.Conv2d(1, 32, 3, padding='same'),\n", " nn.ReLU(),\n", " nn.MaxPool2d(2),\n", " nn.Conv2d(32, 64, 3, padding='same'),\n", " nn.ReLU(),\n", " nn.MaxPool2d(2),\n", " nn.Conv2d(64, 128, 3, padding='same'),\n", " nn.ReLU(),\n", " nn.MaxPool2d(2),\n", " nn.Flatten(),\n", " nn.Linear(1152, 256),\n", " nn.ReLU(),\n", " nn.Linear(256, len(LABELS)),\n", ")\n", "state_dict = torch.load('pytorch_model.bin', map_location='cpu')\n", "model.load_state_dict(state_dict, strict=False)\n", "model.eval()\n", "\n", "def predict(im):\n", " if im is None:\n", " return None\n", " im = np.asarray(im.resize((28, 28)))\n", " \n", " x = torch.tensor(im, dtype=torch.float32).unsqueeze(0).unsqueeze(0) / 255.\n", "\n", " with torch.no_grad():\n", " out = model(x)\n", "\n", " probabilities = torch.nn.functional.softmax(out[0], dim=0)\n", "\n", " values, indices = torch.topk(probabilities, 5)\n", "\n", " return {LABELS[i]: v.item() for i, v in zip(indices, values)}\n", "\n", "\n", "interface = gr.Interface(predict, \n", " inputs=gr.Sketchpad(label=\"Draw Here\", brush_radius=5, type=\"pil\", shape=(120, 120)), \n", " outputs=gr.Label(label=\"Guess\"), \n", " live=True)\n", "\n", "interface.queue().launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/pictionary/run.py b/testbed/gradio-app__gradio/demo/pictionary/run.py new file mode 100644 index 0000000000000000000000000000000000000000..ce9be4d1f3fa9f72a6cf516ab24484c8960d5e74 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/pictionary/run.py @@ -0,0 +1,56 @@ +from pathlib import Path + +import numpy as np +import torch +import gradio as gr +from torch import nn +import gdown + +url = 'https://drive.google.com/uc?id=1dsk2JNZLRDjC-0J4wIQX_FcVurPaXaAZ' +output = 'pytorch_model.bin' +gdown.download(url, output, quiet=False) + +LABELS = Path('class_names.txt').read_text().splitlines() + +model = nn.Sequential( + nn.Conv2d(1, 32, 3, padding='same'), + 
nn.ReLU(), + nn.MaxPool2d(2), + nn.Conv2d(32, 64, 3, padding='same'), + nn.ReLU(), + nn.MaxPool2d(2), + nn.Conv2d(64, 128, 3, padding='same'), + nn.ReLU(), + nn.MaxPool2d(2), + nn.Flatten(), + nn.Linear(1152, 256), + nn.ReLU(), + nn.Linear(256, len(LABELS)), +) +state_dict = torch.load('pytorch_model.bin', map_location='cpu') +model.load_state_dict(state_dict, strict=False) +model.eval() + +def predict(im): + if im is None: + return None + im = np.asarray(im.resize((28, 28))) + + x = torch.tensor(im, dtype=torch.float32).unsqueeze(0).unsqueeze(0) / 255. + + with torch.no_grad(): + out = model(x) + + probabilities = torch.nn.functional.softmax(out[0], dim=0) + + values, indices = torch.topk(probabilities, 5) + + return {LABELS[i]: v.item() for i, v in zip(indices, values)} + + +interface = gr.Interface(predict, + inputs=gr.Sketchpad(label="Draw Here", brush_radius=5, type="pil", shape=(120, 120)), + outputs=gr.Label(label="Guess"), + live=True) + +interface.queue().launch() diff --git a/testbed/gradio-app__gradio/demo/random_demos.py b/testbed/gradio-app__gradio/demo/random_demos.py new file mode 100644 index 0000000000000000000000000000000000000000..6c206f4323ed8698bb60de67ffdab7f1cbcdfe5b --- /dev/null +++ b/testbed/gradio-app__gradio/demo/random_demos.py @@ -0,0 +1,40 @@ +"""Opens X demos randomly for quick inspection + +Usage: python random_demos.py +Example: python random_demos.py 8 + +Assumes: +- This is being run from the gradio/demo/ directory +""" + +from __future__ import annotations + +import argparse +import importlib +import os +import random + +import gradio as gr + +parser = argparse.ArgumentParser() +parser.add_argument("num_demos", help="number of demos to launch", type=int, default=4) +args = parser.parse_args() + +# get the list of directory names +demos_list = next(os.walk('.'))[1] + +# Some demos are just too large or need to be run in a special way, so we'll just skip them +demos_list.remove('streaming_wav2vec') 
+demos_list.remove('blocks_neural_instrument_coding') +demos_list.remove('flagged') + +for d, demo_name in enumerate(random.sample(demos_list, args.num_demos)): + print(f"Launching demo {d+1}/{args.num_demos}: {demo_name}") + # import the run.py file from inside the directory specified by args.demo_name + run = importlib.import_module(f"{demo_name}.run") + demo: gr.Blocks = run.demo + if d == args.num_demos - 1: + demo.launch(prevent_thread_lock=False, inbrowser=True) # prevent main thread from exiting + else: + demo.launch(prevent_thread_lock=True, inbrowser=True) + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/theme_soft/run.ipynb b/testbed/gradio-app__gradio/demo/theme_soft/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..35b788bb8e98cf6822d28b9c96622a27f54a50b6 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/theme_soft/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: theme_soft"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import time\n", "\n", "with gr.Blocks(theme=gr.themes.Soft()) as demo:\n", " textbox = gr.Textbox(label=\"Name\")\n", " slider = gr.Slider(label=\"Count\", minimum=0, maximum=100, step=1)\n", " with gr.Row():\n", " button = gr.Button(\"Submit\", variant=\"primary\")\n", " clear = gr.Button(\"Clear\")\n", " output = gr.Textbox(label=\"Output\")\n", "\n", " def repeat(name, count):\n", " time.sleep(3)\n", " return name * count\n", " \n", " button.click(repeat, [textbox, slider], output)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ 
No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/timeseries_component/run.ipynb b/testbed/gradio-app__gradio/demo/timeseries_component/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..db959d32a74ed63ba3a5e419567413924fdc37b5 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/timeseries_component/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: timeseries_component"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr \n", "\n", "with gr.Blocks() as demo:\n", " gr.Timeseries()\n", "\n", "demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/timeseries_component/run.py b/testbed/gradio-app__gradio/demo/timeseries_component/run.py new file mode 100644 index 0000000000000000000000000000000000000000..0e65d8907fed677524ca5891941f0e6a98127c1f --- /dev/null +++ b/testbed/gradio-app__gradio/demo/timeseries_component/run.py @@ -0,0 +1,6 @@ +import gradio as gr + +with gr.Blocks() as demo: + gr.Timeseries() + +demo.launch() \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/waveform/run.ipynb b/testbed/gradio-app__gradio/demo/waveform/run.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..4da46e83bc908207563f01c2abfb083fcf01a042 --- /dev/null +++ b/testbed/gradio-app__gradio/demo/waveform/run.ipynb @@ -0,0 +1 @@ +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: waveform"]}, {"cell_type": "code", "execution_count": null, "id": 
272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import random\n", "\n", "\n", "COLORS = [\n", " [\"#ff0000\", \"#00ff00\"],\n", " [\"#00ff00\", \"#0000ff\"],\n", " [\"#0000ff\", \"#ff0000\"],\n", "] \n", "\n", "def audio_waveform(audio, image):\n", " return (\n", " audio,\n", " gr.make_waveform(audio),\n", " gr.make_waveform(audio, animate=True),\n", " gr.make_waveform(audio, bg_image=image, bars_color=random.choice(COLORS)),\n", " )\n", "\n", "\n", "gr.Interface(\n", " audio_waveform,\n", " inputs=[gr.Audio(), gr.Image(type=\"filepath\")],\n", " outputs=[\n", " gr.Audio(),\n", " gr.Video(),\n", " gr.Video(),\n", " gr.Video(),\n", " ],\n", ").launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/testbed/gradio-app__gradio/demo/waveform/run.py b/testbed/gradio-app__gradio/demo/waveform/run.py new file mode 100644 index 0000000000000000000000000000000000000000..f86ed52d8eb60538b75c9f70f273b2585c9c382a --- /dev/null +++ b/testbed/gradio-app__gradio/demo/waveform/run.py @@ -0,0 +1,29 @@ +import gradio as gr +import random + + +COLORS = [ + ["#ff0000", "#00ff00"], + ["#00ff00", "#0000ff"], + ["#0000ff", "#ff0000"], +] + +def audio_waveform(audio, image): + return ( + audio, + gr.make_waveform(audio), + gr.make_waveform(audio, animate=True), + gr.make_waveform(audio, bg_image=image, bars_color=random.choice(COLORS)), + ) + + +gr.Interface( + audio_waveform, + inputs=[gr.Audio(), gr.Image(type="filepath")], + outputs=[ + gr.Audio(), + gr.Video(), + gr.Video(), + gr.Video(), + ], +).launch() diff --git a/testbed/gradio-app__gradio/gradio/.dockerignore b/testbed/gradio-app__gradio/gradio/.dockerignore new file mode 100644 index 
0000000000000000000000000000000000000000..450a3af270f5d285eb3a59a03593d06078b145eb --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/.dockerignore @@ -0,0 +1,2 @@ +templates/frontend +templates/frontend/**/* diff --git a/testbed/gradio-app__gradio/gradio/CHANGELOG.md b/testbed/gradio-app__gradio/gradio/CHANGELOG.md new file mode 100644 index 0000000000000000000000000000000000000000..a872a0424c2ae2c44b06eb7b7720ffee9fd95214 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/CHANGELOG.md @@ -0,0 +1,4003 @@ +# gradio + +## 3.45.1 + +### Fixes + +- [#5701](https://github.com/gradio-app/gradio/pull/5701) [`ee8eec1e5`](https://github.com/gradio-app/gradio/commit/ee8eec1e5e544a0127e0aa68c2522a7085b8ada5) - Fix for regression in rendering empty Markdown. Thanks [@abidlabs](https://github.com/abidlabs)! + +## 3.45.0 + +### Features + +- [#5675](https://github.com/gradio-app/gradio/pull/5675) [`b619e6f6e`](https://github.com/gradio-app/gradio/commit/b619e6f6e4ca55334fb86da53790e45a8f978566) - Reorganize Docs Navbar and Fill in Gaps. Thanks [@aliabd](https://github.com/aliabd)! +- [#5669](https://github.com/gradio-app/gradio/pull/5669) [`c5e969559`](https://github.com/gradio-app/gradio/commit/c5e969559612f956afcdb0c6f7b22ab8275bc49a) - Fix small issues in docs and guides. Thanks [@aliabd](https://github.com/aliabd)! +- [#5682](https://github.com/gradio-app/gradio/pull/5682) [`c57f1b75e`](https://github.com/gradio-app/gradio/commit/c57f1b75e272c76b0af4d6bd0c7f44743ff34f26) - Fix functional tests. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5681](https://github.com/gradio-app/gradio/pull/5681) [`40de3d217`](https://github.com/gradio-app/gradio/commit/40de3d2178b61ebe424b6f6228f94c0c6f679bea) - add query parameters to the `gr.Request` object through the `query_params` attribute. Thanks [@DarhkVoyd](https://github.com/DarhkVoyd)! 
+- [#5653](https://github.com/gradio-app/gradio/pull/5653) [`ea0e00b20`](https://github.com/gradio-app/gradio/commit/ea0e00b207b4b90a10e9d054c4202d4e705a29ba) - Prevent Clients from accessing API endpoints that set `api_name=False`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5639](https://github.com/gradio-app/gradio/pull/5639) [`e1874aff8`](https://github.com/gradio-app/gradio/commit/e1874aff814d13b23f3e59ef239cc13e18ad3fa7) - Add `gr.on` listener method. Thanks [@aliabid94](https://github.com/aliabid94)! +- [#5652](https://github.com/gradio-app/gradio/pull/5652) [`2e25d4305`](https://github.com/gradio-app/gradio/commit/2e25d430582264945ae3316acd04c4453a25ce38) - Pause autoscrolling if a user scrolls up in a `gr.Textbox` and resume autoscrolling if they go all the way down. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5642](https://github.com/gradio-app/gradio/pull/5642) [`21c7225bd`](https://github.com/gradio-app/gradio/commit/21c7225bda057117a9d3311854323520218720b5) - Improve plot rendering. Thanks [@aliabid94](https://github.com/aliabid94)! +- [#5677](https://github.com/gradio-app/gradio/pull/5677) [`9f9af327c`](https://github.com/gradio-app/gradio/commit/9f9af327c9115356433ec837f349d6286730fb97) - [Refactoring] Convert async functions that don't contain `await` statements to normal functions. Thanks [@whitphx](https://github.com/whitphx)! +- [#5660](https://github.com/gradio-app/gradio/pull/5660) [`d76555a12`](https://github.com/gradio-app/gradio/commit/d76555a122b545f0df7c9e7c1ca7bd2a6e262c86) - Fix secondary hue bug in gr.themes.builder(). Thanks [@hellofreckles](https://github.com/hellofreckles)! +- [#5697](https://github.com/gradio-app/gradio/pull/5697) [`f4e4f82b5`](https://github.com/gradio-app/gradio/commit/f4e4f82b58a65efca9030a7e8e7c5ace60d8cc10) - Increase Slider clickable area. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! 
+- [#5671](https://github.com/gradio-app/gradio/pull/5671) [`6a36c3b78`](https://github.com/gradio-app/gradio/commit/6a36c3b786700600d3826ce1e0629cc5308ddd47) - chore(deps): update dependency @types/prismjs to v1.26.1. Thanks [@renovate](https://github.com/apps/renovate)! +- [#5240](https://github.com/gradio-app/gradio/pull/5240) [`da05e59a5`](https://github.com/gradio-app/gradio/commit/da05e59a53bbad15e5755a47f46685da18e1031e) - Cleanup of .update and .get_config per component. Thanks [@aliabid94](https://github.com/aliabid94)!/n get_config is removed, the config used is simply any attribute that is in the Block that shares a name with one of the constructor paramaters./n update is not removed for backwards compatibility, but deprecated. Instead return the component itself. Created a updateable decorator that simply checks to see if we're in an update, and if so, skips the constructor and wraps the args and kwargs in an update dictionary. easy peasy. +- [#5635](https://github.com/gradio-app/gradio/pull/5635) [`38fafb9e2`](https://github.com/gradio-app/gradio/commit/38fafb9e2a5509b444942e1d5dd48dffa20066f4) - Fix typos in Gallery docs. Thanks [@atesgoral](https://github.com/atesgoral)! +- [#5590](https://github.com/gradio-app/gradio/pull/5590) [`d1ad1f671`](https://github.com/gradio-app/gradio/commit/d1ad1f671caef9f226eb3965f39164c256d8615c) - Attach `elem_classes` selectors to layout elements, and an id to the Tab button (for targeting via CSS/JS). Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5554](https://github.com/gradio-app/gradio/pull/5554) [`75ddeb390`](https://github.com/gradio-app/gradio/commit/75ddeb390d665d4484667390a97442081b49a423) - Accessibility Improvements. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5598](https://github.com/gradio-app/gradio/pull/5598) [`6b1714386`](https://github.com/gradio-app/gradio/commit/6b17143868bdd2c1400af1199a01c1c0d5c27477) - Upgrade Pyodide to 0.24.0 and install the native orjson package. 
Thanks [@whitphx](https://github.com/whitphx)! + +### Fixes + +- [#5625](https://github.com/gradio-app/gradio/pull/5625) [`9ccc4794a`](https://github.com/gradio-app/gradio/commit/9ccc4794a72ce8319417119f6c370e7af3ffca6d) - Use ContextVar instead of threading.local(). Thanks [@cbensimon](https://github.com/cbensimon)! +- [#5602](https://github.com/gradio-app/gradio/pull/5602) [`54d21d3f1`](https://github.com/gradio-app/gradio/commit/54d21d3f18f2ddd4e796d149a0b41461f49c711b) - Ensure `HighlightedText` with `merge_elements` loads without a value. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5636](https://github.com/gradio-app/gradio/pull/5636) [`fb5964fb8`](https://github.com/gradio-app/gradio/commit/fb5964fb88082e7b956853b543c468116811cab9) - Fix bug in example cache loading event. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! +- [#5633](https://github.com/gradio-app/gradio/pull/5633) [`341402337`](https://github.com/gradio-app/gradio/commit/34140233794c29d4722020e13c2d045da642dfae) - Allow Gradio apps containing `gr.Radio()`, `gr.Checkboxgroup()`, or `gr.Dropdown()` to be loaded with `gr.load()`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5616](https://github.com/gradio-app/gradio/pull/5616) [`7c34b434a`](https://github.com/gradio-app/gradio/commit/7c34b434aae0eb85f112a1dc8d66cefc7e2296b2) - Fix width and height issues that would cut off content in `gr.DataFrame`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5604](https://github.com/gradio-app/gradio/pull/5604) [`faad01f8e`](https://github.com/gradio-app/gradio/commit/faad01f8e10ef6d18249b1a4587477c59b74adb2) - Add `render_markdown` parameter to chatbot. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! +- [#5593](https://github.com/gradio-app/gradio/pull/5593) [`88d43bd12`](https://github.com/gradio-app/gradio/commit/88d43bd124792d216da445adef932a2b02f5f416) - Fixes avatar image in chatbot being squashed. 
Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! +- [#5690](https://github.com/gradio-app/gradio/pull/5690) [`6b8c8afd9`](https://github.com/gradio-app/gradio/commit/6b8c8afd981fea984da568e9a0bd8bfc2a9c06c4) - Fix incorrect behavior of `gr.load()` with `gr.Examples`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5696](https://github.com/gradio-app/gradio/pull/5696) [`e51fcd5d5`](https://github.com/gradio-app/gradio/commit/e51fcd5d54315e8b65ee40e3de4dab17579ff6d5) - setting share=True on Spaces or in wasm should warn instead of raising error. Thanks [@abidlabs](https://github.com/abidlabs)! + +## 3.44.4 + +### Features + +- [#5514](https://github.com/gradio-app/gradio/pull/5514) [`52f783175`](https://github.com/gradio-app/gradio/commit/52f7831751b432411e109bd41add4ab286023a8e) - refactor: Use package.json for version management. Thanks [@DarhkVoyd](https://github.com/DarhkVoyd)! +- [#5535](https://github.com/gradio-app/gradio/pull/5535) [`d29b1ab74`](https://github.com/gradio-app/gradio/commit/d29b1ab740784d8c70f9ab7bc38bbbf7dd3ff737) - Makes sliders consistent across all browsers. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! + +### Fixes + +- [#5587](https://github.com/gradio-app/gradio/pull/5587) [`e0d61b8ba`](https://github.com/gradio-app/gradio/commit/e0d61b8baa0f6293f53b9bdb1647d42f9ae2583a) - Fix `.clear()` events for audio and image. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! +- [#5534](https://github.com/gradio-app/gradio/pull/5534) [`d9e9ae43f`](https://github.com/gradio-app/gradio/commit/d9e9ae43f5c52c1f729af5a20e5d4f754689d429) - Guide fixes, esp. streaming audio. Thanks [@aliabid94](https://github.com/aliabid94)! +- [#5588](https://github.com/gradio-app/gradio/pull/5588) [`acdeff57e`](https://github.com/gradio-app/gradio/commit/acdeff57ece4672f943c374d537eaf47d3ec034f) - Allow multiple instances of Gradio with authentication to run on different ports. Thanks [@abidlabs](https://github.com/abidlabs)! 
+ +## 3.44.3 + +### Fixes + +- [#5562](https://github.com/gradio-app/gradio/pull/5562) [`50d9747d0`](https://github.com/gradio-app/gradio/commit/50d9747d061962cff7f60a8da648bb3781794102) - chore(deps): update dependency iframe-resizer to v4.3.7. Thanks [@renovate](https://github.com/apps/renovate)! +- [#5550](https://github.com/gradio-app/gradio/pull/5550) [`4ed5902e7`](https://github.com/gradio-app/gradio/commit/4ed5902e7dda2d95cd43e4ccaaef520ddd8eba57) - Adding basque language. Thanks [@EkhiAzur](https://github.com/EkhiAzur)! +- [#5547](https://github.com/gradio-app/gradio/pull/5547) [`290f51871`](https://github.com/gradio-app/gradio/commit/290f5187160cdbd7a786494fe3c19b0e70abe167) - typo in UploadButton's docstring. Thanks [@chaeheum3](https://github.com/chaeheum3)! +- [#5553](https://github.com/gradio-app/gradio/pull/5553) [`d1bf23cd2`](https://github.com/gradio-app/gradio/commit/d1bf23cd2c6da3692d7753856bfe7564d84778e0) - Modify Image examples docstring. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! +- [#5563](https://github.com/gradio-app/gradio/pull/5563) [`ba64082ed`](https://github.com/gradio-app/gradio/commit/ba64082ed80c1ed9113497ae089e63f032dbcc75) - preprocess for components when type='index'. Thanks [@abidlabs](https://github.com/abidlabs)! + +## 3.44.2 + +### Fixes + +- [#5537](https://github.com/gradio-app/gradio/pull/5537) [`301c7878`](https://github.com/gradio-app/gradio/commit/301c7878217f9fc531c0f28330b394f02955811b) - allow gr.Image() examples to take urls. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5544](https://github.com/gradio-app/gradio/pull/5544) [`a0cc9ac9`](https://github.com/gradio-app/gradio/commit/a0cc9ac931554e06dcb091158c9b9ac0cc580b6c) - Fixes dropdown breaking if a user types in invalid value and presses enter. Thanks [@abidlabs](https://github.com/abidlabs)! 
+ +## 3.44.1 + +### Fixes + +- [#5516](https://github.com/gradio-app/gradio/pull/5516) [`c5fe8eba`](https://github.com/gradio-app/gradio/commit/c5fe8ebadbf206e2f4199ccde4606e331a22148a) - Fix docstring of dropdown. Thanks [@hysts](https://github.com/hysts)! +- [#5529](https://github.com/gradio-app/gradio/pull/5529) [`81c9ca9a`](https://github.com/gradio-app/gradio/commit/81c9ca9a2e00d19334f632fec32081d36ad54c7f) - Fix `.update()` method in `gr.Dropdown()` to handle `choices`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5528](https://github.com/gradio-app/gradio/pull/5528) [`dc86e4a7`](https://github.com/gradio-app/gradio/commit/dc86e4a7e1c40b910c74558e6f88fddf9b3292bc) - Lazy load all images. Thanks [@aliabid94](https://github.com/aliabid94)! +- [#5525](https://github.com/gradio-app/gradio/pull/5525) [`21f1db40`](https://github.com/gradio-app/gradio/commit/21f1db40de6d1717eba97a550e11422a457ba7e9) - Ensure input value saves on dropdown blur. Thanks [@hannahblair](https://github.com/hannahblair)! + +## 3.44.0 + +### Features + +- [#5505](https://github.com/gradio-app/gradio/pull/5505) [`9ee20f49`](https://github.com/gradio-app/gradio/commit/9ee20f499f62c1fe5af6b8f84918b3a334eb1c8d) - Validate i18n file names with ISO-639x. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5475](https://github.com/gradio-app/gradio/pull/5475) [`c60b89b0`](https://github.com/gradio-app/gradio/commit/c60b89b0a54758a27277f0a6aa20d0653647c7c8) - Adding Central Kurdish. Thanks [@Hrazhan](https://github.com/Hrazhan)! +- [#5400](https://github.com/gradio-app/gradio/pull/5400) [`d112e261`](https://github.com/gradio-app/gradio/commit/d112e2611b0fc79ecedfaed367571f3157211387) - Allow interactive input in `gr.HighlightedText`. Thanks [@hannahblair](https://github.com/hannahblair)! 
+- [#5488](https://github.com/gradio-app/gradio/pull/5488) [`8909e42a`](https://github.com/gradio-app/gradio/commit/8909e42a7c6272358ad413588d27a5124d151205) - Adds `autoscroll` param to `gr.Textbox()`. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! +- [#5384](https://github.com/gradio-app/gradio/pull/5384) [`ddc02268`](https://github.com/gradio-app/gradio/commit/ddc02268f731bd2ed04b7a5854accf3383f9a0da) - Allows the `gr.Dropdown` to have separate names and values, as well as enables `allow_custom_value` for multiselect dropdown. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5473](https://github.com/gradio-app/gradio/pull/5473) [`b271e738`](https://github.com/gradio-app/gradio/commit/b271e738860ca238ecdee2991f49b505c7559016) - Remove except asyncio.CancelledError which is no longer necessary due to 53d7025. Thanks [@whitphx](https://github.com/whitphx)! +- [#5474](https://github.com/gradio-app/gradio/pull/5474) [`041560f9`](https://github.com/gradio-app/gradio/commit/041560f9f11ca2560005b467bb412ee1becfc2b2) - Fix queueing.call_prediction to retrieve the default response class in the same manner as FastAPI's implementation. Thanks [@whitphx](https://github.com/whitphx)! +- [#5510](https://github.com/gradio-app/gradio/pull/5510) [`afcf3c48`](https://github.com/gradio-app/gradio/commit/afcf3c48e82712067d6d00a0caedb1562eb986f8) - Do not expose existence of files outside of working directory. Thanks [@abidlabs](https://github.com/abidlabs)! + +### Fixes + +- [#5459](https://github.com/gradio-app/gradio/pull/5459) [`bd2fda77`](https://github.com/gradio-app/gradio/commit/bd2fda77fc98d815f4fb670f535af453ebee9b80) - Dispatch `stop_recording` event in Audio. Thanks [@hannahblair](https://github.com/hannahblair)! 
+- [#5508](https://github.com/gradio-app/gradio/pull/5508) [`05715f55`](https://github.com/gradio-app/gradio/commit/05715f5599ae3e928d3183c7b0a7f5291f843a96) - Adds a `filterable` parameter to `gr.Dropdown` that controls whether user can type to filter choices. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5470](https://github.com/gradio-app/gradio/pull/5470) [`a4e010a9`](https://github.com/gradio-app/gradio/commit/a4e010a96f1d8a52b3ac645e03fe472b9c3cbbb1) - Fix share button position. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! +- [#5496](https://github.com/gradio-app/gradio/pull/5496) [`82ec4d26`](https://github.com/gradio-app/gradio/commit/82ec4d2622a43c31b248b78e9410e2ac918f6035) - Allow interface with components to be run inside blocks. Thanks [@abidlabs](https://github.com/abidlabs)! + +## 3.43.2 + +### Fixes + +- [#5456](https://github.com/gradio-app/gradio/pull/5456) [`6e381c4f`](https://github.com/gradio-app/gradio/commit/6e381c4f146cc8177a4e2b8e39f914f09cd7ff0c) - ensure dataframe doesn't steal focus. Thanks [@pngwn](https://github.com/pngwn)! + +## 3.43.1 + +### Fixes + +- [#5445](https://github.com/gradio-app/gradio/pull/5445) [`67bb7bcb`](https://github.com/gradio-app/gradio/commit/67bb7bcb6a95b7a00a8bdf612cf147850d919a44) - ensure dataframe doesn't scroll unless needed. Thanks [@pngwn](https://github.com/pngwn)! +- [#5447](https://github.com/gradio-app/gradio/pull/5447) [`7a4a89e5`](https://github.com/gradio-app/gradio/commit/7a4a89e5ca1dedb39e5366867501584b0c636bbb) - ensure iframe is correct size on spaces. Thanks [@pngwn](https://github.com/pngwn)! + +## 3.43.0 + +### Features + +- [#5165](https://github.com/gradio-app/gradio/pull/5165) [`c77f05ab`](https://github.com/gradio-app/gradio/commit/c77f05abb65b2828c9c19af4ec0a0c09412f9f6a) - Fix the Queue to call API endpoints without internal HTTP routing. Thanks [@whitphx](https://github.com/whitphx)! 
+- [#5427](https://github.com/gradio-app/gradio/pull/5427) [`aad7acd7`](https://github.com/gradio-app/gradio/commit/aad7acd7128dca05b227ecbba06db9f94d65b088) - Add sort to bar plot. Thanks [@Chaitanya134](https://github.com/Chaitanya134)! +- [#5342](https://github.com/gradio-app/gradio/pull/5342) [`afac0006`](https://github.com/gradio-app/gradio/commit/afac0006337ce2840cf497cd65691f2f60ee5912) - significantly improve the performance of `gr.Dataframe` for large datasets. Thanks [@pngwn](https://github.com/pngwn)! +- [#5417](https://github.com/gradio-app/gradio/pull/5417) [`d14d63e3`](https://github.com/gradio-app/gradio/commit/d14d63e30c4af3f9c2a664fd11b0a01943a8300c) - Auto scroll to bottom of textbox. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! + +### Fixes + +- [#5412](https://github.com/gradio-app/gradio/pull/5412) [`26fef8c7`](https://github.com/gradio-app/gradio/commit/26fef8c7f85a006c7e25cdbed1792df19c512d02) - Skip view_api request in js client when auth enabled. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! +- [#5436](https://github.com/gradio-app/gradio/pull/5436) [`7ab4b70f`](https://github.com/gradio-app/gradio/commit/7ab4b70f6821afb4e85cef225d1235c19df8ebbf) - api_open does not take precedence over show_api. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +## 3.42.0 + +### Highlights + +#### Like/Dislike Button for Chatbot ([#5391](https://github.com/gradio-app/gradio/pull/5391) [`abf1c57d`](https://github.com/gradio-app/gradio/commit/abf1c57d7d85de0df233ee3b38aeb38b638477db)) + + Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! + +#### Added the ability to attach event listeners via decorators ([#5395](https://github.com/gradio-app/gradio/pull/5395) [`55fed04f`](https://github.com/gradio-app/gradio/commit/55fed04f559becb9c24f22cc6292dc572d709886)) + +e.g. 
+ +```python +with gr.Blocks() as demo: + name = gr.Textbox(label="Name") + output = gr.Textbox(label="Output Box") + greet_btn = gr.Button("Greet") + + @greet_btn.click(inputs=name, outputs=output) + def greet(name): + return "Hello " + name + "!" +``` + + Thanks [@aliabid94](https://github.com/aliabid94)! + +### Features + +- [#5334](https://github.com/gradio-app/gradio/pull/5334) [`c5bf9138`](https://github.com/gradio-app/gradio/commit/c5bf91385a632dc9f612499ee01166ac6ae509a9) - Add chat bubble width param. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! +- [#5267](https://github.com/gradio-app/gradio/pull/5267) [`119c8343`](https://github.com/gradio-app/gradio/commit/119c834331bfae60d4742c8f20e9cdecdd67e8c2) - Faster reload mode. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! +- [#5373](https://github.com/gradio-app/gradio/pull/5373) [`79d8f9d8`](https://github.com/gradio-app/gradio/commit/79d8f9d891901683c5a1b7486efb44eab2478c96) - Adds `height` and `zoom_speed` parameters to `Model3D` component, as well as a button to reset the camera position. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5370](https://github.com/gradio-app/gradio/pull/5370) [`61803c65`](https://github.com/gradio-app/gradio/commit/61803c6545e73fce47e8740bd46721ab9bb0ba5c) - chore(deps): update dependency extendable-media-recorder to v9. Thanks [@renovate](https://github.com/apps/renovate)! +- [#5266](https://github.com/gradio-app/gradio/pull/5266) [`4ccb9a86`](https://github.com/gradio-app/gradio/commit/4ccb9a86f194c6997f80a09880edc3c2b0554aab) - Makes it possible to set the initial camera position for the `Model3D` component as a tuple of (alpha, beta, radius). Thanks [@mbahri](https://github.com/mbahri)! +- [#5271](https://github.com/gradio-app/gradio/pull/5271) [`97c3c7b1`](https://github.com/gradio-app/gradio/commit/97c3c7b1730407f9e80566af9ecb4ca7cccf62ff) - Move scripts from old website to CI. Thanks [@aliabd](https://github.com/aliabd)! 
+- [#5369](https://github.com/gradio-app/gradio/pull/5369) [`b8968898`](https://github.com/gradio-app/gradio/commit/b89688984fa9c6be0db06e392e6935a544620764) - Fix typo in utils.py. Thanks [@eltociear](https://github.com/eltociear)! + +### Fixes + +- [#5304](https://github.com/gradio-app/gradio/pull/5304) [`05892302`](https://github.com/gradio-app/gradio/commit/05892302fb8fe2557d57834970a2b65aea97355b) - Adds kwarg to disable html sanitization in `gr.Chatbot()`. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! +- [#5366](https://github.com/gradio-app/gradio/pull/5366) [`0cc7e2dc`](https://github.com/gradio-app/gradio/commit/0cc7e2dcf60e216e0a30e2f85a9879ce3cb2a1bd) - Hide avatar when message none. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! +- [#5393](https://github.com/gradio-app/gradio/pull/5393) [`e4e7a431`](https://github.com/gradio-app/gradio/commit/e4e7a4319924aaf51dcb18d07d0c9953d4011074) - Renders LaTeX that is added to the page in `gr.Markdown`, `gr.Chatbot`, and `gr.DataFrame`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5394](https://github.com/gradio-app/gradio/pull/5394) [`4d94ea0a`](https://github.com/gradio-app/gradio/commit/4d94ea0a0cf2103cda19f48398a5634f8341d04d) - Adds horizontal scrolling to content that overflows in gr.Markdown. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5368](https://github.com/gradio-app/gradio/pull/5368) [`b27f7583`](https://github.com/gradio-app/gradio/commit/b27f7583254165b135bf1496a7d8c489a62ba96f) - Change markdown rendering to set breaks to false. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5360](https://github.com/gradio-app/gradio/pull/5360) [`64666525`](https://github.com/gradio-app/gradio/commit/6466652583e3c620df995fb865ef3511a34cb676) - Cancel Dropdown Filter. Thanks [@deckar01](https://github.com/deckar01)! 
+ +## 3.41.2 + +### Features + +- [#5284](https://github.com/gradio-app/gradio/pull/5284) [`5f25eb68`](https://github.com/gradio-app/gradio/commit/5f25eb6836f6a78ce6208b53495a01e1fc1a1d2f) - Minor bug fix sweep. Thanks [@aliabid94](https://github.com/aliabid94)!/n - Our use of __exit__ was catching errors and corrupting the traceback of any component that failed to instantiate (try running blocks_kitchen_sink off main for an example). Now the __exit__ exits immediately if there's been an exception, so the original exception can be printed cleanly/n - HighlightedText was rendering weird, cleaned it up + +### Fixes + +- [#5319](https://github.com/gradio-app/gradio/pull/5319) [`3341148c`](https://github.com/gradio-app/gradio/commit/3341148c109b5458cc88435d27eb154210efc472) - Fix: wrap avatar-image in a div to clip its shape. Thanks [@Keldos-Li](https://github.com/Keldos-Li)! +- [#5340](https://github.com/gradio-app/gradio/pull/5340) [`df090e89`](https://github.com/gradio-app/gradio/commit/df090e89f74a16e4cb2b700a1e3263cabd2bdd91) - Fix Checkbox select dispatch. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +## 3.41.1 + +### Fixes + +- [#5324](https://github.com/gradio-app/gradio/pull/5324) [`31996c99`](https://github.com/gradio-app/gradio/commit/31996c991d6bfca8cef975eb8e3c9f61a7aced19) - ensure login form has correct styles. Thanks [@pngwn](https://github.com/pngwn)! +- [#5323](https://github.com/gradio-app/gradio/pull/5323) [`e32b0928`](https://github.com/gradio-app/gradio/commit/e32b0928d2d00342ca917ebb10c379ffc2ec200d) - ensure dropdown stays open when identical data is passed in. Thanks [@pngwn](https://github.com/pngwn)! 
+ +## 3.41.0 + +### Highlights + +#### Improve startup performance and markdown support ([#5279](https://github.com/gradio-app/gradio/pull/5279) [`fe057300`](https://github.com/gradio-app/gradio/commit/fe057300f0672c62dab9d9b4501054ac5d45a4ec)) + +##### Improved markdown support + +We now have better support for markdown in `gr.Markdown` and `gr.Dataframe`. Including syntax highlighting and Github Flavoured Markdown. We also have more consistent markdown behaviour and styling. + +##### Various performance improvements + +These improvements will be particularly beneficial to large applications. + +- Rather than attaching events manually, they are now delegated, leading to a significant performance improvement and addressing a performance regression introduced in a recent version of Gradio. App startup for large applications is now around twice as fast. +- Optimised the mounting of individual components, leading to a modest performance improvement during startup (~30%). +- Corrected an issue that was causing markdown to re-render infinitely. +- Ensured that the `gr.3DModel` does re-render prematurely. + + Thanks [@pngwn](https://github.com/pngwn)! + +#### Enable streaming audio in python client ([#5248](https://github.com/gradio-app/gradio/pull/5248) [`390624d8`](https://github.com/gradio-app/gradio/commit/390624d8ad2b1308a5bf8384435fd0db98d8e29e)) + +The `gradio_client` now supports streaming file outputs 🌊 + +No new syntax! Connect to a gradio demo that supports streaming file outputs and call `predict` or `submit` as you normally would. 
+ +```python +import gradio_client as grc +client = grc.Client("gradio/stream_audio_out") + +# Get the entire generated audio as a local file +client.predict("/Users/freddy/Pictures/bark_demo.mp4", api_name="/predict") + +job = client.submit("/Users/freddy/Pictures/bark_demo.mp4", api_name="/predict") + +# Get the entire generated audio as a local file +job.result() + +# Each individual chunk +job.outputs() +``` + + Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +#### Add `render` function to `` ([#5158](https://github.com/gradio-app/gradio/pull/5158) [`804fcc05`](https://github.com/gradio-app/gradio/commit/804fcc058e147f283ece67f1f353874e26235535)) + +We now have an event `render` on the web component, which is triggered once the embedded space has finished rendering. + +```html + +``` + + Thanks [@hannahblair](https://github.com/hannahblair)! + +### Features + +- [#5268](https://github.com/gradio-app/gradio/pull/5268) [`f49028cf`](https://github.com/gradio-app/gradio/commit/f49028cfe3e21097001ddbda71c560b3d8b42e1c) - Move markdown & latex processing to the frontend for the gr.Markdown and gr.DataFrame components. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5215](https://github.com/gradio-app/gradio/pull/5215) [`fbdad78a`](https://github.com/gradio-app/gradio/commit/fbdad78af4c47454cbb570f88cc14bf4479bbceb) - Lazy load interactive or static variants of a component individually, rather than loading both variants regardless. This change will improve performance for many applications. Thanks [@pngwn](https://github.com/pngwn)! +- [#5216](https://github.com/gradio-app/gradio/pull/5216) [`4b58ea6d`](https://github.com/gradio-app/gradio/commit/4b58ea6d98e7a43b3f30d8a4cb6f379bc2eca6a8) - Update i18n tokens and locale files. Thanks [@hannahblair](https://github.com/hannahblair)! 
+- [#5283](https://github.com/gradio-app/gradio/pull/5283) [`a7460557`](https://github.com/gradio-app/gradio/commit/a74605572dd0d6bb41df6b38b120d656370dd67d) - Add height parameter and scrolling to `gr.Dataframe`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5232](https://github.com/gradio-app/gradio/pull/5232) [`c57d4c23`](https://github.com/gradio-app/gradio/commit/c57d4c232a97e03b4671f9e9edc3af456438fe89) - `gr.Radio` and `gr.CheckboxGroup` can now accept different names and values. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5219](https://github.com/gradio-app/gradio/pull/5219) [`e8fd4e4e`](https://github.com/gradio-app/gradio/commit/e8fd4e4ec68a6c974bc8c84b61f4a0ec50a85bc6) - Add `api_name` parameter to `gr.Interface`. Additionally, completely hide api page if show_api=False. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! +- [#5280](https://github.com/gradio-app/gradio/pull/5280) [`a2f42e28`](https://github.com/gradio-app/gradio/commit/a2f42e28bd793bce4bed6d54164bb2a327a46fd5) - Allow updating the label of `gr.UpdateButton`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5112](https://github.com/gradio-app/gradio/pull/5112) [`1cefee7f`](https://github.com/gradio-app/gradio/commit/1cefee7fc05175aca23ba04b3a3fda7b97f49bf0) - chore(deps): update dependency marked to v7. Thanks [@renovate](https://github.com/apps/renovate)! +- [#5260](https://github.com/gradio-app/gradio/pull/5260) [`a773eaf7`](https://github.com/gradio-app/gradio/commit/a773eaf7504abb53b99885b3454dc1e027adbb42) - Stop passing inputs and preprocessing on iterators. Thanks [@aliabid94](https://github.com/aliabid94)! +- [#4943](https://github.com/gradio-app/gradio/pull/4943) [`947d615d`](https://github.com/gradio-app/gradio/commit/947d615db6f76519d0e8bc0d1a0d7edf89df267b) - Sign in with Hugging Face (OAuth support). Thanks [@Wauplin](https://github.com/Wauplin)! 
+- [#5298](https://github.com/gradio-app/gradio/pull/5298) [`cf167cd1`](https://github.com/gradio-app/gradio/commit/cf167cd1dd4acd9aee225ff1cb6fac0e849806ba) - Create event listener table for components on docs. Thanks [@aliabd](https://github.com/aliabd)! +- [#5173](https://github.com/gradio-app/gradio/pull/5173) [`730f0c1d`](https://github.com/gradio-app/gradio/commit/730f0c1d54792eb11359e40c9f2326e8a6e39203) - Ensure gradio client works as expected for functions that return nothing. Thanks [@raymondtri](https://github.com/raymondtri)! +- [#5188](https://github.com/gradio-app/gradio/pull/5188) [`b22e1888`](https://github.com/gradio-app/gradio/commit/b22e1888fcf0843520525c1e4b7e1fe73fdeb948) - Fix the images in the theme builder to use permanent URI. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5221](https://github.com/gradio-app/gradio/pull/5221) [`f344592a`](https://github.com/gradio-app/gradio/commit/f344592aeb1658013235ded154107f72d86f24e7) - Allows setting a height to `gr.File` and improves the UI of the component. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5265](https://github.com/gradio-app/gradio/pull/5265) [`06982212`](https://github.com/gradio-app/gradio/commit/06982212dfbd613853133d5d0eebd75577967027) - Removes scrollbar from File preview when not needed. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5305](https://github.com/gradio-app/gradio/pull/5305) [`15075241`](https://github.com/gradio-app/gradio/commit/15075241fa7ad3f7fd9ae2a91e54faf8f19a46f9) - Rotate axes labels on LinePlot, BarPlot, and ScatterPlot. Thanks [@Faiga91](https://github.com/Faiga91)! +- [#5258](https://github.com/gradio-app/gradio/pull/5258) [`92282cea`](https://github.com/gradio-app/gradio/commit/92282cea6afdf7e9930ece1046d8a63be34b3cea) - Chatbot Avatar Images. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! 
+- [#5244](https://github.com/gradio-app/gradio/pull/5244) [`b3e50db9`](https://github.com/gradio-app/gradio/commit/b3e50db92f452f376aa2cc081326d40bb69d6dd7) - Remove aiohttp dependency. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! +- [#5264](https://github.com/gradio-app/gradio/pull/5264) [`46a2b600`](https://github.com/gradio-app/gradio/commit/46a2b600a7ff030a9ea1560b882b3bf3ad266bbc) - ensure translations for audio work correctly. Thanks [@hannahblair](https://github.com/hannahblair)! + +### Fixes + +- [#5256](https://github.com/gradio-app/gradio/pull/5256) [`933db53e`](https://github.com/gradio-app/gradio/commit/933db53e93a1229fdf149556d61da5c4c7e1a331) - Better handling of empty dataframe in `gr.DataFrame`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5242](https://github.com/gradio-app/gradio/pull/5242) [`2b397791`](https://github.com/gradio-app/gradio/commit/2b397791fe2059e4beb72937ff0436f2d4d28b4b) - Fix message text overflow onto copy button in `gr.Chatbot`. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5253](https://github.com/gradio-app/gradio/pull/5253) [`ddac7e4d`](https://github.com/gradio-app/gradio/commit/ddac7e4d0f55c3bdc6c3e9a9e24588b2563e4049) - Ensure File component uploads files to the server. Thanks [@pngwn](https://github.com/pngwn)! +- [#5179](https://github.com/gradio-app/gradio/pull/5179) [`6fb92b48`](https://github.com/gradio-app/gradio/commit/6fb92b48a916104db573602011a448b904d42e5e) - Fixes audio streaming issues. Thanks [@aliabid94](https://github.com/aliabid94)! +- [#5295](https://github.com/gradio-app/gradio/pull/5295) [`7b8fa8aa`](https://github.com/gradio-app/gradio/commit/7b8fa8aa58f95f5046b9add64b40368bd3f1b700) - Allow caching examples with streamed output. Thanks [@aliabid94](https://github.com/aliabid94)! 
+- [#5285](https://github.com/gradio-app/gradio/pull/5285) [`cdfd4217`](https://github.com/gradio-app/gradio/commit/cdfd42174a9c777eaee9c1209bf8e90d8c7791f2) - Tweaks to `icon` parameter in `gr.Button()`. Thanks [@abidlabs](https://github.com/abidlabs)!
- [#5122](https://github.com/gradio-app/gradio/pull/5122) [`3b805346`](https://github.com/gradio-app/gradio/commit/3b8053469aca6c7a86a6731e641e4400fc34d7d3) - Allows code block in chatbot to scroll horizontally. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)!
- [#5312](https://github.com/gradio-app/gradio/pull/5312) [`f769cb67`](https://github.com/gradio-app/gradio/commit/f769cb67149d8e209091508f06d87014acaed965) - only start listening for events after the components are mounted. Thanks [@pngwn](https://github.com/pngwn)!
- [#5254](https://github.com/gradio-app/gradio/pull/5254) [`c39f06e1`](https://github.com/gradio-app/gradio/commit/c39f06e16b9feea97984e4822df35a99c807461c) - Fix `.update()` for `gr.Radio()` and `gr.CheckboxGroup()`. Thanks [@abidlabs](https://github.com/abidlabs)!
- [#5231](https://github.com/gradio-app/gradio/pull/5231) [`87f1c2b4`](https://github.com/gradio-app/gradio/commit/87f1c2b4ac7c685c43477215fa5b96b6cbeffa05) - Allow `gr.Interface.from_pipeline()` and `gr.load()` to work within `gr.Blocks()`. Thanks [@abidlabs](https://github.com/abidlabs)!
- [#5238](https://github.com/gradio-app/gradio/pull/5238) [`de23e9f7`](https://github.com/gradio-app/gradio/commit/de23e9f7d67e685e791faf48a21f34121f6d094a) - Improve audio streaming. Thanks [@aliabid94](https://github.com/aliabid94)!
  - Proper audio streaming with WAV files. We now do the proper processing to stream out wav files as a single stream of audio without any cracks in the seams.
  - Audio streaming with bytes. Stream any audio type by yielding out bytes, and it should work flawlessly. 
+- [#5313](https://github.com/gradio-app/gradio/pull/5313) [`54bcb724`](https://github.com/gradio-app/gradio/commit/54bcb72417b2781ad9d7500ea0f89aa9d80f7d8f) - Restores missing part of bottom border on file component. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5235](https://github.com/gradio-app/gradio/pull/5235) [`1ecf88ac`](https://github.com/gradio-app/gradio/commit/1ecf88ac5f20bc5a1c91792d1a68559575e6afd7) - fix #5229. Thanks [@breengles](https://github.com/breengles)! +- [#5276](https://github.com/gradio-app/gradio/pull/5276) [`502f1015`](https://github.com/gradio-app/gradio/commit/502f1015bf23b365bc32446dd2e549b0c5d0dc72) - Ensure `Blocks` translation copy renders correctly. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5296](https://github.com/gradio-app/gradio/pull/5296) [`a0f22626`](https://github.com/gradio-app/gradio/commit/a0f22626f2aff297754414bbc83d5c4cfe086ea0) - `make_waveform()` twitter video resolution fix. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! + +## 3.40.0 + +### Highlights + +#### Client.predict will now return the final output for streaming endpoints ([#5057](https://github.com/gradio-app/gradio/pull/5057) [`35856f8b`](https://github.com/gradio-app/gradio/commit/35856f8b54548cae7bd3b8d6a4de69e1748283b2)) + +### This is a breaking change (for gradio_client only)! + +Previously, `Client.predict` would only return the first output of an endpoint that streamed results. This was causing confusion for developers that wanted to call these streaming demos via the client. + +We realize that developers using the client don't know the internals of whether a demo streams or not, so we're changing the behavior of predict to match developer expectations. + +Using `Client.predict` will now return the final output of a streaming endpoint. This will make it even easier to use gradio apps via the client. + + Thanks [@freddyaboulton](https://github.com/freddyaboulton)! 
+ +#### Gradio now supports streaming audio outputs + +Allows users to use generators to stream audio out, yielding consecutive chunks of audio. Requires `streaming=True` to be set on the output audio. + +```python +import gradio as gr +from pydub import AudioSegment + +def stream_audio(audio_file): + audio = AudioSegment.from_mp3(audio_file) + i = 0 + chunk_size = 3000 + + while chunk_size*i < len(audio): + chunk = audio[chunk_size*i:chunk_size*(i+1)] + i += 1 + if chunk: + file = f"/tmp/{i}.mp3" + chunk.export(file, format="mp3") + yield file + +demo = gr.Interface( + fn=stream_audio, + inputs=gr.Audio(type="filepath", label="Audio file to stream"), + outputs=gr.Audio(autoplay=True, streaming=True), +) + +demo.queue().launch() +``` + +From the backend, streamed outputs are served from the `/stream/` endpoint instead of the `/file/` endpoint. Currently just used to serve audio streaming output. The output JSON will have `is_stream`: `true`, instead of `is_file`: `true` in the file data object. Thanks [@aliabid94](https://github.com/aliabid94)! + +### Features + +- [#5081](https://github.com/gradio-app/gradio/pull/5081) [`d7f83823`](https://github.com/gradio-app/gradio/commit/d7f83823fbd7604456b0127d689a63eed759807d) - solve how can I config root_path dynamically? #4968. Thanks [@eastonsuo](https://github.com/eastonsuo)! +- [#5025](https://github.com/gradio-app/gradio/pull/5025) [`6693660a`](https://github.com/gradio-app/gradio/commit/6693660a790996f8f481feaf22a8c49130d52d89) - Add download button to selected images in `Gallery`. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5133](https://github.com/gradio-app/gradio/pull/5133) [`61129052`](https://github.com/gradio-app/gradio/commit/61129052ed1391a75c825c891d57fa0ad6c09fc8) - Update dependency esbuild to ^0.19.0. Thanks [@renovate](https://github.com/apps/renovate)! 
+- [#5125](https://github.com/gradio-app/gradio/pull/5125) [`80be7a1c`](https://github.com/gradio-app/gradio/commit/80be7a1ca44c0adef1668367b2cf36b65e52e576) - chatbot conversation nodes can contain a copy button. Thanks [@fazpu](https://github.com/fazpu)! +- [#5048](https://github.com/gradio-app/gradio/pull/5048) [`0b74a159`](https://github.com/gradio-app/gradio/commit/0b74a1595b30df744e32a2c358c07acb7fd1cfe5) - Use `importlib` in favor of deprecated `pkg_resources`. Thanks [@jayceslesar](https://github.com/jayceslesar)! +- [#5045](https://github.com/gradio-app/gradio/pull/5045) [`3b9494f5`](https://github.com/gradio-app/gradio/commit/3b9494f5c57e6b52e6a040ce8d6b5141f780e84d) - Lite: Fix the analytics module to use asyncio to work in the Wasm env. Thanks [@whitphx](https://github.com/whitphx)! +- [#5046](https://github.com/gradio-app/gradio/pull/5046) [`5244c587`](https://github.com/gradio-app/gradio/commit/5244c5873c355cf3e2f0acb7d67fda3177ef8b0b) - Allow new lines in `HighlightedText` with `/n` and preserve whitespace. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5076](https://github.com/gradio-app/gradio/pull/5076) [`2745075a`](https://github.com/gradio-app/gradio/commit/2745075a26f80e0e16863d483401ff1b6c5ada7a) - Add deploy_discord to docs. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! +- [#5116](https://github.com/gradio-app/gradio/pull/5116) [`0dc49b4c`](https://github.com/gradio-app/gradio/commit/0dc49b4c517706f572240f285313a881089ced79) - Add support for async functions and async generators to `gr.ChatInterface`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5047](https://github.com/gradio-app/gradio/pull/5047) [`883ac364`](https://github.com/gradio-app/gradio/commit/883ac364f69d92128774ac446ce49bdf8415fd7b) - Add `step` param to `Number`. Thanks [@hannahblair](https://github.com/hannahblair)! 
+- [#5137](https://github.com/gradio-app/gradio/pull/5137) [`22aa5eba`](https://github.com/gradio-app/gradio/commit/22aa5eba3fee3f14473e4b0fac29cf72fe31ef04) - Use font size `--text-md` for `` in Chatbot messages. Thanks [@jaywonchung](https://github.com/jaywonchung)! +- [#5005](https://github.com/gradio-app/gradio/pull/5005) [`f5539c76`](https://github.com/gradio-app/gradio/commit/f5539c7618e31451420bd3228754774da14dc65f) - Enhancement: Add focus event to textbox and number component. Thanks [@JodyZ0203](https://github.com/JodyZ0203)! +- [#5104](https://github.com/gradio-app/gradio/pull/5104) [`34f6b22e`](https://github.com/gradio-app/gradio/commit/34f6b22efbfedfa569d452f3f99ed2e6593e3c21) - Strip leading and trailing spaces from username in login route. Thanks [@sweep-ai](https://github.com/apps/sweep-ai)! +- [#5149](https://github.com/gradio-app/gradio/pull/5149) [`144df459`](https://github.com/gradio-app/gradio/commit/144df459a3b7895e524defcfc4c03fbb8b083aca) - Add `show_edit_button` param to `gr.Audio`. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5136](https://github.com/gradio-app/gradio/pull/5136) [`eaa1ce14`](https://github.com/gradio-app/gradio/commit/eaa1ce14ac41de1c23321e93f11f1b03a2f3c7f4) - Enhancing Tamil Translation: Language Refinement 🌟. Thanks [@sanjaiyan-dev](https://github.com/sanjaiyan-dev)! +- [#5035](https://github.com/gradio-app/gradio/pull/5035) [`8b4eb8ca`](https://github.com/gradio-app/gradio/commit/8b4eb8cac9ea07bde31b44e2006ca2b7b5f4de36) - JS Client: Fixes cannot read properties of null (reading 'is_file'). Thanks [@raymondtri](https://github.com/raymondtri)! +- [#5023](https://github.com/gradio-app/gradio/pull/5023) [`e6317d77`](https://github.com/gradio-app/gradio/commit/e6317d77f87d3dad638acca3dbc4a9228570e63c) - Update dependency extendable-media-recorder to v8. Thanks [@renovate](https://github.com/apps/renovate)! 
+- [#5085](https://github.com/gradio-app/gradio/pull/5085) [`13e47835`](https://github.com/gradio-app/gradio/commit/13e478353532c4af18cfa50772f8b6fb3c6c9818) - chore(deps): update dependency extendable-media-recorder to v8. Thanks [@renovate](https://github.com/apps/renovate)! +- [#5080](https://github.com/gradio-app/gradio/pull/5080) [`37caa2e0`](https://github.com/gradio-app/gradio/commit/37caa2e0fe95d6cab8beb174580fb557904f137f) - Add icon and link params to `gr.Button`. Thanks [@hannahblair](https://github.com/hannahblair)! + +### Fixes + +- [#5062](https://github.com/gradio-app/gradio/pull/5062) [`7d897165`](https://github.com/gradio-app/gradio/commit/7d89716519d0751072792c9bbda668ffeb597296) - `gr.Dropdown` now has correct behavior in static mode as well as when an option is selected. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5077](https://github.com/gradio-app/gradio/pull/5077) [`667875b2`](https://github.com/gradio-app/gradio/commit/667875b2441753e74d25bd9d3c8adedd8ede11cd) - Live audio streaming output +- [#5118](https://github.com/gradio-app/gradio/pull/5118) [`1b017e68`](https://github.com/gradio-app/gradio/commit/1b017e68f6a9623cc2ec085bd20e056229552028) - Add `interactive` args to `gr.ColorPicker`. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5114](https://github.com/gradio-app/gradio/pull/5114) [`56d2609d`](https://github.com/gradio-app/gradio/commit/56d2609de93387a75dc82b1c06c1240c5b28c0b8) - Reset textbox value to empty string when value is None. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#5075](https://github.com/gradio-app/gradio/pull/5075) [`67265a58`](https://github.com/gradio-app/gradio/commit/67265a58027ef1f9e4c0eb849a532f72eaebde48) - Allow supporting >1000 files in `gr.File()` and `gr.UploadButton()`. Thanks [@abidlabs](https://github.com/abidlabs)! 
+- [#5135](https://github.com/gradio-app/gradio/pull/5135) [`80727bbe`](https://github.com/gradio-app/gradio/commit/80727bbe2c6d631022054edf01515017691b3bdd) - Fix dataset features and dataset preview for HuggingFaceDatasetSaver. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! +- [#5039](https://github.com/gradio-app/gradio/pull/5039) [`620e4645`](https://github.com/gradio-app/gradio/commit/620e46452729d6d4877b3fab84a65daf2f2b7bc6) - `gr.Dropdown()` now supports values with arbitrary characters and doesn't clear value when re-focused. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5061](https://github.com/gradio-app/gradio/pull/5061) [`136adc9c`](https://github.com/gradio-app/gradio/commit/136adc9ccb23e5cb4d02d2e88f23f0b850041f98) - Ensure `gradio_client` is backwards compatible with `gradio==3.24.1`. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5129](https://github.com/gradio-app/gradio/pull/5129) [`97d804c7`](https://github.com/gradio-app/gradio/commit/97d804c748be9acfe27b8369dd2d64d61f43c2e7) - [Spaces] ZeroGPU Queue fix. Thanks [@cbensimon](https://github.com/cbensimon)! +- [#5140](https://github.com/gradio-app/gradio/pull/5140) [`cd1353fa`](https://github.com/gradio-app/gradio/commit/cd1353fa3eb1b015f5860ca5d5a8e8d1aa4a831c) - Fixes the display of minutes in the video player. Thanks [@abidlabs](https://github.com/abidlabs)! +- [#5111](https://github.com/gradio-app/gradio/pull/5111) [`b84a35b7`](https://github.com/gradio-app/gradio/commit/b84a35b7b91eca947f787648ceb361b1d023427b) - Add icon and link to DuplicateButton. Thanks [@aliabd](https://github.com/aliabd)! +- [#5030](https://github.com/gradio-app/gradio/pull/5030) [`f6c491b0`](https://github.com/gradio-app/gradio/commit/f6c491b079d335af633dd854c68eb26f9e61c552) - highlightedtext throws an error basing on model. Thanks [@rajeunoia](https://github.com/rajeunoia)! 
+ +## 3.39.0 + +### Highlights + +#### Create Discord Bots from Gradio Apps 🤖 ([#4960](https://github.com/gradio-app/gradio/pull/4960) [`46e4ef67`](https://github.com/gradio-app/gradio/commit/46e4ef67d287dd68a91473b73172b29cbad064bc)) + +We're excited to announce that Gradio can now automatically create a discord bot from any `gr.ChatInterface` app. + +It's as easy as importing `gradio_client`, connecting to the app, and calling `deploy_discord`! + +_🦙 Turning Llama 2 70b into a discord bot 🦙_ + +```python +import gradio_client as grc +grc.Client("ysharma/Explore_llamav2_with_TGI").deploy_discord(to_id="llama2-70b-discord-bot") +``` + + + +#### Getting started with template spaces + +To help get you started, we have created an organization on Hugging Face called [gradio-discord-bots](https://huggingface.co/gradio-discord-bots) with template spaces you can use to turn state of the art LLMs powered by Gradio to discord bots. + +Currently we have template spaces for: + +- [Llama-2-70b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/Llama-2-70b-chat-hf) powered by a FREE Hugging Face Inference Endpoint! +- [Llama-2-13b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/Llama-2-13b-chat-hf) powered by Hugging Face Inference Endpoints. +- [Llama-2-13b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/llama-2-13b-chat-transformers) powered by Hugging Face transformers. +- [falcon-7b-instruct](https://huggingface.co/spaces/gradio-discord-bots/falcon-7b-instruct) powered by Hugging Face Inference Endpoints. +- [gpt-3.5-turbo](https://huggingface.co/spaces/gradio-discord-bots/gpt-35-turbo), powered by openai. Requires an OpenAI key. + +But once again, you can deploy ANY `gr.ChatInterface` app exposed on the internet! So don't hesitate to try it on your own Chatbots. + +❗️ Additional Note ❗️: Technically, any gradio app that exposes an api route that takes in a single string and outputs a single string can be deployed to discord. 
But `gr.ChatInterface` apps naturally lend themselves to discord's chat functionality so we suggest you start with those. + +Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +### Features + +- [#4995](https://github.com/gradio-app/gradio/pull/4995) [`3f8c210b`](https://github.com/gradio-app/gradio/commit/3f8c210b01ef1ceaaf8ee73be4bf246b5b745bbf) - Implement left and right click in `Gallery` component and show implicit images in `Gallery` grid. Thanks [@hannahblair](https://github.com/hannahblair)! +- [#4993](https://github.com/gradio-app/gradio/pull/4993) [`dc07a9f9`](https://github.com/gradio-app/gradio/commit/dc07a9f947de44b419d8384987a02dcf94977851) - Bringing back the "Add download button for audio" PR by [@leuryr](https://github.com/leuryr). Thanks [@abidlabs](https://github.com/abidlabs)! +- [#4979](https://github.com/gradio-app/gradio/pull/4979) [`44ac8ad0`](https://github.com/gradio-app/gradio/commit/44ac8ad08d82ea12c503dde5c78f999eb0452de2) - Allow setting sketch color default. Thanks [@aliabid94](https://github.com/aliabid94)! +- [#4985](https://github.com/gradio-app/gradio/pull/4985) [`b74f8453`](https://github.com/gradio-app/gradio/commit/b74f8453034328f0e42da8e41785f5eb039b45d7) - Adds `additional_inputs` to `gr.ChatInterface`. Thanks [@abidlabs](https://github.com/abidlabs)! + +### Fixes + +- [#4997](https://github.com/gradio-app/gradio/pull/4997) [`41c83070`](https://github.com/gradio-app/gradio/commit/41c83070b01632084e7d29123048a96c1e261407) - Add CSS resets and specifiers to play nice with HF blog. Thanks [@aliabid94](https://github.com/aliabid94)! 
+ +## 3.38 + +### New Features: + +- Provide a parameter `animate` (`False` by default) in `gr.make_waveform()` which animates the overlayed waveform by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4918](https://github.com/gradio-app/gradio/pull/4918) +- Add `show_download_button` param to allow the download button in static Image components to be hidden by [@hannahblair](https://github.com/hannahblair) in [PR 4959](https://github.com/gradio-app/gradio/pull/4959) +- Added autofocus argument to Textbox by [@aliabid94](https://github.com/aliabid94) in [PR 4978](https://github.com/gradio-app/gradio/pull/4978) +- The `gr.ChatInterface` UI now converts the "Submit" button to a "Stop" button in ChatInterface while streaming, which can be used to pause generation. By [@abidlabs](https://github.com/abidlabs) in [PR 4971](https://github.com/gradio-app/gradio/pull/4971). +- Add a `border_color_accent_subdued` theme variable to add a subdued border color to accented items. This is used by chatbot user messages. Set the value of this variable in `Default` theme to `*primary_200`. By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4989](https://github.com/gradio-app/gradio/pull/4989) +- Add default sketch color argument `brush_color`. Also, masks drawn on images are now slightly translucent (and mask color can also be set via brush_color). By [@aliabid94](https://github.com/aliabid94) in [PR 4979](https://github.com/gradio-app/gradio/pull/4979) + +### Bug Fixes: + +- Fixes `cancels` for generators so that if a generator is canceled before it is complete, subsequent runs of the event do not continue from the previous iteration, but rather start from the beginning. By [@abidlabs](https://github.com/abidlabs) in [PR 4969](https://github.com/gradio-app/gradio/pull/4969). 
+- Use `gr.State` in `gr.ChatInterface` to reduce latency by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4976](https://github.com/gradio-app/gradio/pull/4976) +- Fix bug with `gr.Interface` where component labels inferred from handler parameters were including special args like `gr.Request` or `gr.EventData`. By [@cbensimon](https://github.com/cbensimon) in [PR 4956](https://github.com/gradio-app/gradio/pull/4956) + +### Breaking Changes: + +No changes to highlight. + +### Other Changes: + +- Apply pyright to the `components` directory by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4948](https://github.com/gradio-app/gradio/pull/4948) +- Improved look of ChatInterface by [@aliabid94](https://github.com/aliabid94) in [PR 4978](https://github.com/gradio-app/gradio/pull/4978) + +## 3.37 + +### New Features: + +Introducing a new `gr.ChatInterface` abstraction, which allows Gradio users to build fully functioning Chat interfaces very easily. The only required parameter is a chat function `fn`, which accepts a (string) user input `message` and a (list of lists) chat `history` and returns a (string) response. Here's a toy example: + +```py +import gradio as gr + +def echo(message, history): + return message + +demo = gr.ChatInterface(fn=echo, examples=["hello", "hola", "merhaba"], title="Echo Bot") +demo.launch() +``` + +Which produces: + +image + +And a corresponding easy-to-use API at `/chat`: + +image + +The `gr.ChatInterface` abstraction works nicely with various LLM libraries, such as `langchain`. See the [dedicated guide](https://gradio.app/guides/creating-a-chatbot-fast) for more examples using `gr.ChatInterface`. 
+ Collective team effort in [PR 4869](https://github.com/gradio-app/gradio/pull/4869)

- Chatbot messages now show hyperlinks to download files uploaded to `gr.Chatbot()` by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4848](https://github.com/gradio-app/gradio/pull/4848)
- Cached examples now work with generators and async generators by [@abidlabs](https://github.com/abidlabs) in [PR 4927](https://github.com/gradio-app/gradio/pull/4927)
- Add RTL support to `gr.Markdown`, `gr.Chatbot`, `gr.Textbox` (via the `rtl` boolean parameter) and text-alignment to `gr.Textbox` (via the string `text_align` parameter) by [@abidlabs](https://github.com/abidlabs) in [PR 4933](https://github.com/gradio-app/gradio/pull/4933)

Examples of usage:

```py
with gr.Blocks() as demo:
    gr.Textbox(interactive=True, text_align="right")
demo.launch()
```

```py
with gr.Blocks() as demo:
    gr.Markdown("سلام", rtl=True)
demo.launch()
```

- The `get_api_info` method of `Blocks` now supports layout output components [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4871](https://github.com/gradio-app/gradio/pull/4871)

- Added the support for the new command `gradio environment` to make it easier for people to file bug reports if we shipped an easy command to list the OS, gradio version, and versions of gradio/gradio-client dependencies. by [@varshneydevansh](https://github.com/varshneydevansh) in [PR 4915](https://github.com/gradio-app/gradio/pull/4915). 
+
### Bug Fixes:

- The `.change()` event is fixed in `Video` and `Image` so that it only fires once by [@abidlabs](https://github.com/abidlabs) in [PR 4793](https://github.com/gradio-app/gradio/pull/4793)
- The `.change()` event is fixed in `Audio` so that it fires when the component value is programmatically updated by [@abidlabs](https://github.com/abidlabs) in [PR 4793](https://github.com/gradio-app/gradio/pull/4793)

* Add missing `display: flex` property to `Row` so that flex styling is applied to children by [@hannahblair](https://github.com/hannahblair) in [PR 4896](https://github.com/gradio-app/gradio/pull/4896)
* Fixed bug where `gr.Video` could not preprocess urls by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4904](https://github.com/gradio-app/gradio/pull/4904)
* Fixed copy button rendering in API page on Safari by [@aliabid94](https://github.com/aliabid94) in [PR 4924](https://github.com/gradio-app/gradio/pull/4924)
* Fixed `gr.Group` and `container=False`. `container` parameter only available for `Textbox`, `Number`, and `Dropdown`, the only elements where it makes sense. By [@aliabid94](https://github.com/aliabid94) in [PR 4916](https://github.com/gradio-app/gradio/pull/4916)
* Fixed broken image link in auto-generated `app.py` from `ThemeClass.push_to_hub` by [@deepkyu](https://github.com/deepkyu) in [PR 4944](https://github.com/gradio-app/gradio/pull/4944)

### Other Changes:

- Warning on mobile that if a user leaves the tab, websocket connection may break. On broken connection, tries to rejoin queue and displays error conveying connection broke. By [@aliabid94](https://github.com/aliabid94) in [PR 4742](https://github.com/gradio-app/gradio/pull/4742)
- Remove blocking network calls made before the local URL gets printed - these slow down the display of the local URL, especially when no internet is available. [@aliabid94](https://github.com/aliabid94) in [PR 4905](https://github.com/gradio-app/gradio/pull/4905). 
+- Pinned dependencies to major versions to reduce the likelihood of a broken `gradio` due to changes in downstream dependencies by [@abidlabs](https://github.com/abidlabs) in [PR 4885](https://github.com/gradio-app/gradio/pull/4885) +- Queue `max_size` defaults to parent Blocks `max_thread` when running on Spaces with ZeroGPU hardware. By [@cbensimon](https://github.com/cbensimon) in [PR 4937](https://github.com/gradio-app/gradio/pull/4937) + +### Breaking Changes: + +Motivated by the release of `pydantic==2.0`, which included breaking changes that broke a large number of Gradio apps, we've pinned many gradio dependencies. Note that pinned dependencies can cause downstream conflicts, so this may be a breaking change. That being said, we've kept the pins pretty loose, and we're expecting change to be better for the long-term stability of Gradio apps. + +## 3.36.1 + +### New Features: + +- Hotfix to support pydantic v1 and v2 by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4835](https://github.com/gradio-app/gradio/pull/4835) + +### Bug Fixes: + +- Fix bug where `gr.File` change event was not triggered when the value was changed by another event by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4811](https://github.com/gradio-app/gradio/pull/4811) + +### Other Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +## 3.36.0 + +### New Features: + +- The `gr.Video`, `gr.Audio`, `gr.Image`, `gr.Chatbot`, and `gr.Gallery` components now include a share icon when deployed on Spaces. This behavior can be modified by setting the `show_share_button` parameter in the component classes. 
+ by [@aliabid94](https://github.com/aliabid94) in [PR 4651](https://github.com/gradio-app/gradio/pull/4651)
- Allow the web component `space`, `src`, and `host` attributes to be updated dynamically by [@pngwn](https://github.com/pngwn) in [PR 4461](https://github.com/gradio-app/gradio/pull/4461)
- Suggestion for Spaces Duplication built into Gradio, by [@aliabid94](https://github.com/aliabid94) in [PR 4458](https://github.com/gradio-app/gradio/pull/4458)
- The `api_name` parameter now accepts `False` as a value, which means it does not show up in named or unnamed endpoints. By [@abidlabs](https://github.com/abidlabs) in [PR 4683](https://github.com/gradio-app/gradio/pull/4683)
- Added support for `pathlib.Path` in `gr.Video`, `gr.Gallery`, and `gr.Chatbot` by [@sunilkumardash9](https://github.com/sunilkumardash9) in [PR 4581](https://github.com/gradio-app/gradio/pull/4581).

### Bug Fixes:

- Updated components with `info` attribute to update when `update()` is called on them. by [@jebarpg](https://github.com/jebarpg) in [PR 4715](https://github.com/gradio-app/gradio/pull/4715). 
+- Ensure the `Image` component's undo button works when mode is `mask` or `color-sketch` by [@amyorz](https://github.com/AmyOrz) in [PR 4692](https://github.com/gradio-app/gradio/pull/4692) +- Load the iframe resizer external asset asynchronously, by [@akx](https://github.com/akx) in [PR 4336](https://github.com/gradio-app/gradio/pull/4336) +- Restored missing imports in `gr.components` by [@abidlabs](https://github.com/abidlabs) in [PR 4566](https://github.com/gradio-app/gradio/pull/4566) +- Fix bug where `select` event was not triggered in `gr.Gallery` if `height` was set to be large with `allow_preview=False` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4551](https://github.com/gradio-app/gradio/pull/4551) +- Fix bug where setting `visible=False` in `gr.Group` event did not work by [@abidlabs](https://github.com/abidlabs) in [PR 4567](https://github.com/gradio-app/gradio/pull/4567) +- Fix `make_waveform` to work with paths that contain spaces [@akx](https://github.com/akx) in [PR 4570](https://github.com/gradio-app/gradio/pull/4570) & [PR 4578](https://github.com/gradio-app/gradio/pull/4578) +- Send captured data in `stop_recording` event for `gr.Audio` and `gr.Video` components by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4554](https://github.com/gradio-app/gradio/pull/4554) +- Fix bug in `gr.Gallery` where `height` and `object_fit` parameters were being ignored by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4576](https://github.com/gradio-app/gradio/pull/4576) +- Fixes an HTML sanitization issue in DOMPurify where links in markdown were not opening in a new window by [@hannahblair](https://github.com/hannahblair) in [PR 4577](https://github.com/gradio-app/gradio/pull/4577) +- Fixed Dropdown height rendering in Columns by [@aliabid94](https://github.com/aliabid94) in [PR 4584](https://github.com/gradio-app/gradio/pull/4584) +- Fixed bug where `AnnotatedImage` css styling was causing the annotation masks to not be displayed correctly by 
[@freddyaboulton](https://github.com/freddyaboulton) in [PR 4628](https://github.com/gradio-app/gradio/pull/4628) +- Ensure that Gradio does not silently fail when running on a port that is occupied by [@abidlabs](https://github.com/abidlabs) in [PR 4624](https://github.com/gradio-app/gradio/pull/4624). +- Fix double upload bug that caused lag in file uploads by [@aliabid94](https://github.com/aliabid94) in [PR 4661](https://github.com/gradio-app/gradio/pull/4661) +- `Progress` component now appears even when no `iterable` is specified in `tqdm` constructor by [@itrushkin](https://github.com/itrushkin) in [PR 4475](https://github.com/gradio-app/gradio/pull/4475) +- Deprecation warnings now point at the user code using those deprecated features, instead of Gradio internals, by [@akx](https://github.com/akx) in [PR 4694](https://github.com/gradio-app/gradio/pull/4694) +- Adapt column widths in gr.Examples based on content by [@pngwn](https://github.com/pngwn) & [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4700](https://github.com/gradio-app/gradio/pull/4700) +- The `plot` parameter deprecation warnings should now only be emitted for `Image` components by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4709](https://github.com/gradio-app/gradio/pull/4709) +- Removed unnecessary `type` deprecation warning by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4709](https://github.com/gradio-app/gradio/pull/4709) +- Ensure Audio autoplay works when `autoplay=True` and the video source is dynamically updated [@pngwn](https://github.com/pngwn) in [PR 4705](https://github.com/gradio-app/gradio/pull/4705) +- When an error modal is shown in spaces, ensure we scroll to the top so it can be seen by [@pngwn](https://github.com/pngwn) in [PR 4712](https://github.com/gradio-app/gradio/pull/4712) +- Update dependencies by [@pngwn](https://github.com/pngwn) in [PR 4675](https://github.com/gradio-app/gradio/pull/4675) +- Fixes `gr.Dropdown` being cut off at 
the bottom by [@abidlabs](https://github.com/abidlabs) in [PR 4691](https://github.com/gradio-app/gradio/pull/4691). +- Scroll top when clicking "View API" in spaces by [@pngwn](https://github.com/pngwn) in [PR 4714](https://github.com/gradio-app/gradio/pull/4714) +- Fix bug where `show_label` was hiding the entire component for `gr.Label` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4713](https://github.com/gradio-app/gradio/pull/4713) +- Don't crash when uploaded image has broken EXIF data, by [@akx](https://github.com/akx) in [PR 4764](https://github.com/gradio-app/gradio/pull/4764) +- Place toast messages at the top of the screen by [@pngwn](https://github.com/pngwn) in [PR 4796](https://github.com/gradio-app/gradio/pull/4796) +- Fix regressed styling of Login page when auth is enabled by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4797](https://github.com/gradio-app/gradio/pull/4797) +- Prevent broken scrolling to output on Spaces by [@aliabid94](https://github.com/aliabid94) in [PR 4822](https://github.com/gradio-app/gradio/pull/4822) + +### Other Changes: + +- Add `.git-blame-ignore-revs` by [@akx](https://github.com/akx) in [PR 4586](https://github.com/gradio-app/gradio/pull/4586) +- Update frontend dependencies in [PR 4601](https://github.com/gradio-app/gradio/pull/4601) +- Use `typing.Literal` where possible in gradio library and client by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4608](https://github.com/gradio-app/gradio/pull/4608) +- Remove unnecessary mock json files for frontend E2E tests by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4625](https://github.com/gradio-app/gradio/pull/4625) +- Update dependencies by [@pngwn](https://github.com/pngwn) in [PR 4643](https://github.com/gradio-app/gradio/pull/4643) +- The theme builder now launches successfully, and the API docs are cleaned up. 
By [@abidlabs](https://github.com/abidlabs) in [PR 4683](https://github.com/gradio-app/gradio/pull/4683) +- Remove `cleared_value` from some components as it's no longer used internally by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4685](https://github.com/gradio-app/gradio/pull/4685) +- Better errors when you define two Blocks and reference components in one Blocks from the events in the other Blocks [@abidlabs](https://github.com/abidlabs) in [PR 4738](https://github.com/gradio-app/gradio/pull/4738). +- Better message when share link is not created by [@abidlabs](https://github.com/abidlabs) in [PR 4773](https://github.com/gradio-app/gradio/pull/4773). +- Improve accessibility around selected images in gr.Gallery component by [@hannahblair](https://github.com/hannahblair) in [PR 4790](https://github.com/gradio-app/gradio/pull/4790) + +### Breaking Changes: + +[PR 4683](https://github.com/gradio-app/gradio/pull/4683) removes the explicit named endpoint "load_examples" from gr.Interface that was introduced in [PR 4456](https://github.com/gradio-app/gradio/pull/4456). + +## 3.35.2 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fix chatbot streaming by [@aliabid94](https://github.com/aliabid94) in [PR 4537](https://github.com/gradio-app/gradio/pull/4537) +- Fix chatbot height and scrolling by [@aliabid94](https://github.com/aliabid94) in [PR 4540](https://github.com/gradio-app/gradio/pull/4540) + +### Other Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +## 3.35.1 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fix chatbot streaming by [@aliabid94](https://github.com/aliabid94) in [PR 4537](https://github.com/gradio-app/gradio/pull/4537) +- Fix error modal position and text size by [@pngwn](https://github.com/pngwn) in [PR 4538](https://github.com/gradio-app/gradio/pull/4538). + +### Other Changes: + +No changes to highlight. 
+ +### Breaking Changes: + +No changes to highlight. + +## 3.35.0 + +### New Features: + +- A `gr.ClearButton` which allows users to easily clear the values of components by [@abidlabs](https://github.com/abidlabs) in [PR 4456](https://github.com/gradio-app/gradio/pull/4456) + +Example usage: + +```py +import gradio as gr + +with gr.Blocks() as demo: + chatbot = gr.Chatbot([("Hello", "How are you?")]) + with gr.Row(): + textbox = gr.Textbox(scale=3, interactive=True) + gr.ClearButton([textbox, chatbot], scale=1) + +demo.launch() +``` + +- Min and max value for gr.Number by [@artegoser](https://github.com/artegoser) and [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3991](https://github.com/gradio-app/gradio/pull/3991) +- Add `start_recording` and `stop_recording` events to `Video` and `Audio` components by [@pngwn](https://github.com/pngwn) in [PR 4422](https://github.com/gradio-app/gradio/pull/4422) +- Allow any function to generate an error message and allow multiple messages to appear at a time. Other error modal improvements such as auto dismiss after a time limit and a new layout on mobile [@pngwn](https://github.com/pngwn) in [PR 4459](https://github.com/gradio-app/gradio/pull/4459). +- Add `autoplay` kwarg to `Video` and `Audio` components by [@pngwn](https://github.com/pngwn) in [PR 4453](https://github.com/gradio-app/gradio/pull/4453) +- Add `allow_preview` parameter to `Gallery` to control whether a detailed preview is displayed on click by + [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4470](https://github.com/gradio-app/gradio/pull/4470) +- Add `latex_delimiters` parameter to `Chatbot` to control the delimiters used for LaTeX and to disable LaTeX in the `Chatbot` by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4516](https://github.com/gradio-app/gradio/pull/4516) +- Can now issue `gr.Warning` and `gr.Info` modals. 
Simply put the code `gr.Warning("Your warning message")` or `gr.Info("Your info message")` as a standalone line in your function. By [@aliabid94](https://github.com/aliabid94) in [PR 4518](https://github.com/gradio-app/gradio/pull/4518). + +Example: + +```python +def start_process(name): + gr.Info("Starting process") + if name is None: + gr.Warning("Name is empty") + ... + if success == False: + raise gr.Error("Process failed") +``` + +### Bug Fixes: + +- Add support for PAUSED state in the JS client by [@abidlabs](https://github.com/abidlabs) in [PR 4438](https://github.com/gradio-app/gradio/pull/4438) +- Ensure Tabs only occupy the space required by [@pngwn](https://github.com/pngwn) in [PR 4419](https://github.com/gradio-app/gradio/pull/4419) +- Ensure components have the correct empty sizes to prevent empty containers from collapsing by [@pngwn](https://github.com/pngwn) in [PR 4447](https://github.com/gradio-app/gradio/pull/4447). +- Frontend code no longer crashes when there is a relative URL in an `<a>` element, by [@akx](https://github.com/akx) in [PR 4449](https://github.com/gradio-app/gradio/pull/4449). +- Fix bug where setting `format='mp4'` on a video component would cause the function to error out if the uploaded video was not playable by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4467](https://github.com/gradio-app/gradio/pull/4467) +- Fix `_js` parameter to work even without backend function, by [@aliabid94](https://github.com/aliabid94) in [PR 4486](https://github.com/gradio-app/gradio/pull/4486). 
+- Fix new line issue with `gr.Chatbot()` by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4491](https://github.com/gradio-app/gradio/pull/4491) +- Fixes issue with Clear button not working for `Label` component by [@abidlabs](https://github.com/abidlabs) in [PR 4456](https://github.com/gradio-app/gradio/pull/4456) +- Restores the ability to pass in a tuple (sample rate, audio array) to gr.Audio() by [@abidlabs](https://github.com/abidlabs) in [PR 4525](https://github.com/gradio-app/gradio/pull/4525) +- Ensure code is correctly formatted and copy button is always present in Chatbot by [@pngwn](https://github.com/pngwn) in [PR 4527](https://github.com/gradio-app/gradio/pull/4527) +- `show_label` will not automatically be set to `True` in `gr.BarPlot.update` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4531](https://github.com/gradio-app/gradio/pull/4531) +- `gr.BarPlot` group text now respects darkmode by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4531](https://github.com/gradio-app/gradio/pull/4531) +- Fix dispatched errors from within components [@aliabid94](https://github.com/aliabid94) in [PR 4786](https://github.com/gradio-app/gradio/pull/4786) + +### Other Changes: + +- Change styling of status and toast error components by [@hannahblair](https://github.com/hannahblair) in [PR 4454](https://github.com/gradio-app/gradio/pull/4454). +- Clean up unnecessary `new Promise()`s by [@akx](https://github.com/akx) in [PR 4442](https://github.com/gradio-app/gradio/pull/4442). +- Minor UI cleanup for Examples and Dataframe components [@aliabid94](https://github.com/aliabid94) in [PR 4455](https://github.com/gradio-app/gradio/pull/4455). +- Add Catalan translation [@jordimas](https://github.com/jordimas) in [PR 4483](https://github.com/gradio-app/gradio/pull/4483). 
+- The API endpoint that loads examples upon click has been given an explicit name ("/load_examples") by [@abidlabs](https://github.com/abidlabs) in [PR 4456](https://github.com/gradio-app/gradio/pull/4456). +- Allows configuration of FastAPI app when calling `mount_gradio_app`, by [@charlesfrye](https://github.com/charlesfrye) in [PR 4519](https://github.com/gradio-app/gradio/pull/4519). + +### Breaking Changes: + +- The behavior of the `Clear` button has been changed for `Slider`, `CheckboxGroup`, `Radio`, `Dropdown` components by [@abidlabs](https://github.com/abidlabs) in [PR 4456](https://github.com/gradio-app/gradio/pull/4456). The Clear button now sets the value of these components to be empty as opposed to the original default set by the developer. This is to make them in line with the rest of the Gradio components. +- Python 3.7 end of life is June 27 2023. Gradio will no longer support python 3.7 by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4484](https://github.com/gradio-app/gradio/pull/4484) +- Removed `$` as a default LaTeX delimiter for the `Chatbot` by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4516](https://github.com/gradio-app/gradio/pull/4516). The specific LaTeX delimiters can be set using the new `latex_delimiters` parameter in `Chatbot`. + +## 3.34.0 + +### New Features: + +- The `gr.UploadButton` component now supports the `variant` and `interactive` parameters by [@abidlabs](https://github.com/abidlabs) in [PR 4436](https://github.com/gradio-app/gradio/pull/4436). 
+ +### Bug Fixes: + +- Remove target="\_blank" override on anchor tags with internal targets by [@hannahblair](https://github.com/hannahblair) in [PR 4405](https://github.com/gradio-app/gradio/pull/4405) +- Fixed bug where `gr.File(file_count='multiple')` could not be cached as output by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4421](https://github.com/gradio-app/gradio/pull/4421) +- Restricts the domains that can be proxied via `/proxy` route by [@abidlabs](https://github.com/abidlabs) in [PR 4406](https://github.com/gradio-app/gradio/pull/4406). +- Fixes issue where `gr.UploadButton` could not be used to upload the same file twice by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4437](https://github.com/gradio-app/gradio/pull/4437) +- Fixes bug where `/proxy` route was being incorrectly constructed by the frontend by [@abidlabs](https://github.com/abidlabs) in [PR 4430](https://github.com/gradio-app/gradio/pull/4430). +- Fix z-index of status component by [@hannahblair](https://github.com/hannahblair) in [PR 4429](https://github.com/gradio-app/gradio/pull/4429) +- Fix video rendering in Safari by [@aliabid94](https://github.com/aliabid94) in [PR 4433](https://github.com/gradio-app/gradio/pull/4433). +- The output directory for files downloaded when calling Blocks as a function is now set to a temporary directory by default (instead of the working directory in some cases) by [@abidlabs](https://github.com/abidlabs) in [PR 4501](https://github.com/gradio-app/gradio/pull/4501) + +### Other Changes: + +- When running on Spaces, handler functions will be transformed by the [PySpaces](https://pypi.org/project/spaces/) library in order to make them work with specific hardware. 
It will have no effect on standalone Gradio apps or regular Gradio Spaces and can be globally deactivated as follows : `import spaces; spaces.disable_gradio_auto_wrap()` by [@cbensimon](https://github.com/cbensimon) in [PR 4389](https://github.com/gradio-app/gradio/pull/4389). +- Deprecated `.style` parameter and moved arguments to constructor. Added support for `.update()` to all arguments initially in style. Added `scale` and `min_width` support to every Component. By [@aliabid94](https://github.com/aliabid94) in [PR 4374](https://github.com/gradio-app/gradio/pull/4374) + +### Breaking Changes: + +No changes to highlight. + +## 3.33.1 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Allow `every` to work with generators by [@dkjshk](https://github.com/dkjshk) in [PR 4434](https://github.com/gradio-app/gradio/pull/4434) +- Fix z-index of status component by [@hannahblair](https://github.com/hannahblair) in [PR 4429](https://github.com/gradio-app/gradio/pull/4429) +- Allow gradio to work offline, by [@aliabid94](https://github.com/aliabid94) in [PR 4398](https://github.com/gradio-app/gradio/pull/4398). +- Fixed `validate_url` to check for 403 errors and use a GET request in place of a HEAD by [@alvindaiyan](https://github.com/alvindaiyan) in [PR 4388](https://github.com/gradio-app/gradio/pull/4388). + +### Other Changes: + +- More explicit error message when share link binary is blocked by antivirus by [@abidlabs](https://github.com/abidlabs) in [PR 4380](https://github.com/gradio-app/gradio/pull/4380). + +### Breaking Changes: + +No changes to highlight. + +## 3.33.0 + +### New Features: + +- Introduced `gradio deploy` to launch a Gradio app to Spaces directly from your terminal. By [@aliabid94](https://github.com/aliabid94) in [PR 4033](https://github.com/gradio-app/gradio/pull/4033). 
+- Introduce `show_progress='corner'` argument to event listeners, which will not cover the output components with the progress animation, but instead show it in the corner of the components. By [@aliabid94](https://github.com/aliabid94) in [PR 4396](https://github.com/gradio-app/gradio/pull/4396). + +### Bug Fixes: + +- Fix bug where Label change event was triggering itself by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4371](https://github.com/gradio-app/gradio/pull/4371) +- Make `Blocks.load` behave like other event listeners (allows chaining `then` off of it) [@anentropic](https://github.com/anentropic/) in [PR 4304](https://github.com/gradio-app/gradio/pull/4304) +- Respect `interactive=True` in output components of a `gr.Interface` by [@abidlabs](https://github.com/abidlabs) in [PR 4356](https://github.com/gradio-app/gradio/pull/4356). +- Remove unused frontend code by [@akx](https://github.com/akx) in [PR 4275](https://github.com/gradio-app/gradio/pull/4275) +- Fixes favicon path on Windows by [@abidlabs](https://github.com/abidlabs) in [PR 4369](https://github.com/gradio-app/gradio/pull/4369). +- Prevent path traversal in `/file` routes by [@abidlabs](https://github.com/abidlabs) in [PR 4370](https://github.com/gradio-app/gradio/pull/4370). +- Do not send HF token to other domains via `/proxy` route by [@abidlabs](https://github.com/abidlabs) in [PR 4368](https://github.com/gradio-app/gradio/pull/4368). +- Replace default `markedjs` sanitize function with DOMPurify sanitizer for `gr.Chatbot()` by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4360](https://github.com/gradio-app/gradio/pull/4360) +- Prevent the creation of duplicate copy buttons in the chatbot and ensure copy buttons work in non-secure contexts by [@binary-husky](https://github.com/binary-husky) in [PR 4350](https://github.com/gradio-app/gradio/pull/4350). 
+ +### Other Changes: + +- Remove flicker of loading bar by adding opacity transition, by [@aliabid94](https://github.com/aliabid94) in [PR 4349](https://github.com/gradio-app/gradio/pull/4349). +- Performance optimization in the frontend's Blocks code by [@akx](https://github.com/akx) in [PR 4334](https://github.com/gradio-app/gradio/pull/4334) +- Upgrade the pnpm lock file format version from v6.0 to v6.1 by [@whitphx](https://github.com/whitphx) in [PR 4393](https://github.com/gradio-app/gradio/pull/4393) + +### Breaking Changes: + +- The `/file=` route no longer allows accessing dotfiles or files in "dot directories" by [@akx](https://github.com/akx) in [PR 4303](https://github.com/gradio-app/gradio/pull/4303) + +## 3.32.0 + +### New Features: + +- `Interface.launch()` and `Blocks.launch()` now accept an `app_kwargs` argument to allow customizing the configuration of the underlying FastAPI app, by [@akx](https://github.com/akx) in [PR 4282](https://github.com/gradio-app/gradio/pull/4282) + +### Bug Fixes: + +- Fixed Gallery/AnnotatedImage components not respecting GRADIO_DEFAULT_DIR variable by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4256](https://github.com/gradio-app/gradio/pull/4256) +- Fixed Gallery/AnnotatedImage components resaving identical images by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4256](https://github.com/gradio-app/gradio/pull/4256) +- Fixed Audio/Video/File components creating empty tempfiles on each run by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4256](https://github.com/gradio-app/gradio/pull/4256) +- Fixed the behavior of the `run_on_click` parameter in `gr.Examples` by [@abidlabs](https://github.com/abidlabs) in [PR 4258](https://github.com/gradio-app/gradio/pull/4258). 
+- Ensure error modal displays when the queue is enabled by [@pngwn](https://github.com/pngwn) in [PR 4273](https://github.com/gradio-app/gradio/pull/4273) +- Ensure js client respects the full root when making requests to the server by [@pngwn](https://github.com/pngwn) in [PR 4271](https://github.com/gradio-app/gradio/pull/4271) + +### Other Changes: + +- Refactor web component `initial_height` attribute by [@whitphx](https://github.com/whitphx) in [PR 4223](https://github.com/gradio-app/gradio/pull/4223) +- Relocate `mount_css` fn to remove circular dependency [@whitphx](https://github.com/whitphx) in [PR 4222](https://github.com/gradio-app/gradio/pull/4222) +- Upgrade Black to 23.3 by [@akx](https://github.com/akx) in [PR 4259](https://github.com/gradio-app/gradio/pull/4259) +- Add frontend LaTeX support in `gr.Chatbot()` using `KaTeX` by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4285](https://github.com/gradio-app/gradio/pull/4285). + +### Breaking Changes: + +No changes to highlight. + +## 3.31.0 + +### New Features: + +- The reloader command (`gradio app.py`) can now accept command line arguments by [@micky2be](https://github.com/micky2be) in [PR 4119](https://github.com/gradio-app/gradio/pull/4119) +- Added `format` argument to `Audio` component by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4178](https://github.com/gradio-app/gradio/pull/4178) +- Add JS client code snippets to use via api page by [@aliabd](https://github.com/aliabd) in [PR 3927](https://github.com/gradio-app/gradio/pull/3927). +- Update to the JS client by [@pngwn](https://github.com/pngwn) in [PR 4202](https://github.com/gradio-app/gradio/pull/4202) + +### Bug Fixes: + +- Fix "TypeError: issubclass() arg 1 must be a class" when using Optional[Types] by [@lingfengchencn](https://github.com/lingfengchencn) in [PR 4200](https://github.com/gradio-app/gradio/pull/4200). 
+- Gradio will no longer send any analytics or call home if analytics are disabled with the GRADIO_ANALYTICS_ENABLED environment variable. By [@akx](https://github.com/akx) in [PR 4194](https://github.com/gradio-app/gradio/pull/4194) and [PR 4236](https://github.com/gradio-app/gradio/pull/4236) +- The deprecation warnings for kwargs now show the actual stack level for the invocation, by [@akx](https://github.com/akx) in [PR 4203](https://github.com/gradio-app/gradio/pull/4203). +- Ensure cancelling functions work correctly by [@pngwn](https://github.com/pngwn) in [PR 4225](https://github.com/gradio-app/gradio/pull/4225) +- Fixes a bug with typing.get_type_hints() on Python 3.9 by [@abidlabs](https://github.com/abidlabs) in [PR 4228](https://github.com/gradio-app/gradio/pull/4228). +- Fixes JSONDecodeError by [@davidai](https://github.com/davidai) in [PR 4241](https://github.com/gradio-app/gradio/pull/4241) +- Fix `chatbot_dialogpt` demo by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4238](https://github.com/gradio-app/gradio/pull/4238). + +### Other Changes: + +- Change `gr.Chatbot()` markdown parsing to frontend using `marked` library and `prism` by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4150](https://github.com/gradio-app/gradio/pull/4150) +- Update the js client by [@pngwn](https://github.com/pngwn) in [PR 3899](https://github.com/gradio-app/gradio/pull/3899) +- Fix documentation for the shape of the numpy array produced by the `Image` component by [@der3318](https://github.com/der3318) in [PR 4204](https://github.com/gradio-app/gradio/pull/4204). 
+- Updates the timeout for websocket messaging from 1 second to 5 seconds by [@abidlabs](https://github.com/abidlabs) in [PR 4235](https://github.com/gradio-app/gradio/pull/4235) + +### Breaking Changes: + +No changes to highlight. + +## 3.30.0 + +### New Features: + +- Adds a `root_path` parameter to `launch()` that allows running Gradio applications on subpaths (e.g. www.example.com/app) behind a proxy, by [@abidlabs](https://github.com/abidlabs) in [PR 4133](https://github.com/gradio-app/gradio/pull/4133) +- Fix dropdown change listener to trigger on change when updated as an output by [@aliabid94](https://github.com/aliabid94) in [PR 4128](https://github.com/gradio-app/gradio/pull/4128). +- Add `.input` event listener, which is only triggered when a user changes the component value (as compared to `.change`, which is also triggered when a component updates as the result of a function trigger), by [@aliabid94](https://github.com/aliabid94) in [PR 4157](https://github.com/gradio-app/gradio/pull/4157). + +### Bug Fixes: + +- Records username when flagging by [@abidlabs](https://github.com/abidlabs) in [PR 4135](https://github.com/gradio-app/gradio/pull/4135) +- Fix website build issue by [@aliabd](https://github.com/aliabd) in [PR 4142](https://github.com/gradio-app/gradio/pull/4142) +- Fix lang agnostic type info for `gr.File(file_count='multiple')` output components by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4153](https://github.com/gradio-app/gradio/pull/4153) + +### Other Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. 
+ +## 3.29.0 + +### New Features: + +- Returning language agnostic types in the `/info` route by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4039](https://github.com/gradio-app/gradio/pull/4039) + +### Bug Fixes: + +- Allow users to upload audio files in Audio component on iOS by [@aliabid94](https://github.com/aliabid94) in [PR 4071](https://github.com/gradio-app/gradio/pull/4071). +- Fixes the gradio theme builder error that appeared on launch by [@aliabid94](https://github.com/aliabid94) and [@abidlabs](https://github.com/abidlabs) in [PR 4080](https://github.com/gradio-app/gradio/pull/4080) +- Keep Accordion content in DOM by [@aliabid94](https://github.com/aliabid94) in [PR 4073](https://github.com/gradio-app/gradio/pull/4073) +- Fixed bug where type hints in functions caused the event handler to crash by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 4068](https://github.com/gradio-app/gradio/pull/4068) +- Fix dropdown default value not appearing by [@aliabid94](https://github.com/aliabid94) in [PR 4072](https://github.com/gradio-app/gradio/pull/4072). +- Soft theme label color fix by [@aliabid94](https://github.com/aliabid94) in [PR 4070](https://github.com/gradio-app/gradio/pull/4070) +- Fix `gr.Slider` `release` event not triggering on mobile by [@space-nuko](https://github.com/space-nuko) in [PR 4098](https://github.com/gradio-app/gradio/pull/4098) +- Removes extraneous `State` component info from the `/info` route by [@abidlabs](https://github.com/abidlabs) in [PR 4107](https://github.com/gradio-app/gradio/pull/4107) +- Make .then() work even if first event fails by [@aliabid94](https://github.com/aliabid94) in [PR 4115](https://github.com/gradio-app/gradio/pull/4115). + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. 
+ +### Full Changelog: + +- Allow users to submit with enter in Interfaces with textbox / number inputs [@aliabid94](https://github.com/aliabid94) in [PR 4090](https://github.com/gradio-app/gradio/pull/4090). +- Updates gradio's requirements.txt to requires uvicorn>=0.14.0 by [@abidlabs](https://github.com/abidlabs) in [PR 4086](https://github.com/gradio-app/gradio/pull/4086) +- Updates some error messaging by [@abidlabs](https://github.com/abidlabs) in [PR 4086](https://github.com/gradio-app/gradio/pull/4086) +- Renames simplified Chinese translation file from `zh-cn.json` to `zh-CN.json` by [@abidlabs](https://github.com/abidlabs) in [PR 4086](https://github.com/gradio-app/gradio/pull/4086) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.28.3 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fixes issue with indentation in `gr.Code()` component with streaming by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4043](https://github.com/gradio-app/gradio/pull/4043) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. 
+ +## 3.28.2 + +### Bug Fixes + +- Code component visual updates by [@pngwn](https://github.com/pngwn) in [PR 4051](https://github.com/gradio-app/gradio/pull/4051) + +### New Features: + +- Add support for `visual-question-answering`, `document-question-answering`, and `image-to-text` using `gr.Interface.load("models/...")` and `gr.Interface.from_pipeline` by [@osanseviero](https://github.com/osanseviero) in [PR 3887](https://github.com/gradio-app/gradio/pull/3887) +- Add code block support in `gr.Chatbot()`, by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 4048](https://github.com/gradio-app/gradio/pull/4048) +- Adds the ability to blocklist filepaths (and also improves the allowlist mechanism) by [@abidlabs](https://github.com/abidlabs) in [PR 4047](https://github.com/gradio-app/gradio/pull/4047). +- Adds the ability to specify the upload directory via an environment variable by [@abidlabs](https://github.com/abidlabs) in [PR 4047](https://github.com/gradio-app/gradio/pull/4047). + +### Bug Fixes: + +- Fixes issue with `matplotlib` not rendering correctly if the backend was not set to `Agg` by [@abidlabs](https://github.com/abidlabs) in [PR 4029](https://github.com/gradio-app/gradio/pull/4029) +- Fixes bug where rendering the same `gr.State` across different Interfaces/Blocks within larger Blocks would not work by [@abidlabs](https://github.com/abidlabs) in [PR 4030](https://github.com/gradio-app/gradio/pull/4030) +- Code component visual updates by [@pngwn](https://github.com/pngwn) in [PR 4051](https://github.com/gradio-app/gradio/pull/4051) + +### Documentation Changes: + +- Adds a Guide on how to use the Python Client within a FastAPI app, by [@abidlabs](https://github.com/abidlabs) in [PR 3892](https://github.com/gradio-app/gradio/pull/3892) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +- `gr.HuggingFaceDatasetSaver` behavior changed internally. 
The `flagging/` folder is not a `.git/` folder anymore when using it. `organization` parameter is now ignored in favor of passing a full dataset id as `dataset_name` (e.g. `"username/my-dataset"`). +- New lines (`\n`) are not automatically converted to `
<br>` in `gr.Markdown()` or `gr.Chatbot()`. For multiple new lines, a developer must add multiple `<br>
` tags. + +### Full Changelog: + +- Safer version of `gr.HuggingFaceDatasetSaver` using HTTP methods instead of git pull/push by [@Wauplin](https://github.com/Wauplin) in [PR 3973](https://github.com/gradio-app/gradio/pull/3973) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.28.1 + +### New Features: + +- Add a "clear mask" button to `gr.Image` sketch modes, by [@space-nuko](https://github.com/space-nuko) in [PR 3615](https://github.com/gradio-app/gradio/pull/3615) + +### Bug Fixes: + +- Fix dropdown default value not appearing by [@aliabid94](https://github.com/aliabid94) in [PR 3996](https://github.com/gradio-app/gradio/pull/3996). +- Fix faded coloring of output textboxes in iOS / Safari by [@aliabid94](https://github.com/aliabid94) in [PR 3993](https://github.com/gradio-app/gradio/pull/3993) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +- CI: Simplified Python CI workflow by [@akx](https://github.com/akx) in [PR 3982](https://github.com/gradio-app/gradio/pull/3982) +- Upgrade pyright to 1.1.305 by [@akx](https://github.com/akx) in [PR 4042](https://github.com/gradio-app/gradio/pull/4042) +- More Ruff rules are enabled and lint errors fixed by [@akx](https://github.com/akx) in [PR 4038](https://github.com/gradio-app/gradio/pull/4038) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +## 3.28.0 + +### Bug Fixes: + +- Fix duplicate play commands in full-screen mode of 'video'. by [@tomchang25](https://github.com/tomchang25) in [PR 3968](https://github.com/gradio-app/gradio/pull/3968). +- Fix the issue of the UI stuck caused by the 'selected' of DataFrame not being reset. by [@tomchang25](https://github.com/tomchang25) in [PR 3916](https://github.com/gradio-app/gradio/pull/3916). 
+- Fix issue where `gr.Video()` would not work inside a `gr.Tab()` by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3891](https://github.com/gradio-app/gradio/pull/3891)
+- Fixed issue with old_value check in File. by [@tomchang25](https://github.com/tomchang25) in [PR 3859](https://github.com/gradio-app/gradio/pull/3859).
+- Fixed bug where all bokeh plots appeared in the same div by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3896](https://github.com/gradio-app/gradio/pull/3896)
+- Fixed image outputs to automatically take full output image height, unless explicitly set, by [@aliabid94](https://github.com/aliabid94) in [PR 3905](https://github.com/gradio-app/gradio/pull/3905)
+- Fix issue in `gr.Gallery()` where setting height causes aspect ratio of images to collapse by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3830](https://github.com/gradio-app/gradio/pull/3830)
+- Fix issue where requesting for a non-existing file would trigger a 500 error by [@micky2be](https://github.com/micky2be) in [PR 3895](https://github.com/gradio-app/gradio/pull/3895).
+- Fix bugs with abspath about symlinks, and unresolvable path on Windows by [@micky2be](https://github.com/micky2be) in [PR 3895](https://github.com/gradio-app/gradio/pull/3895). 
+- Fixes type in client `Status` enum by [@10zinten](https://github.com/10zinten) in [PR 3931](https://github.com/gradio-app/gradio/pull/3931)
+- Fix `gr.ChatBot` to handle image url by [@tye-signwa](https://github.com/tye-signwa) in [PR 3953](https://github.com/gradio-app/gradio/pull/3953)
+- Move Google Tag Manager related initialization code to analytics-enabled block by [@akx](https://github.com/akx) in [PR 3956](https://github.com/gradio-app/gradio/pull/3956)
+- Fix bug where port was not reused if the demo was closed and then re-launched by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3959](https://github.com/gradio-app/gradio/pull/3959)
+- Fixes issue where dropdown does not position itself at selected element when opened by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3639](https://github.com/gradio-app/gradio/pull/3639)
+
+### Documentation Changes:
+
+- Make use of `gr` consistent across the docs by [@duerrsimon](https://github.com/duerrsimon) in [PR 3901](https://github.com/gradio-app/gradio/pull/3901)
+- Fixed typo in theming-guide.md by [@eltociear](https://github.com/eltociear) in [PR 3952](https://github.com/gradio-app/gradio/pull/3952)
+
+### Testing and Infrastructure Changes:
+
+- CI: Python backend lint is only run once, by [@akx](https://github.com/akx) in [PR 3960](https://github.com/gradio-app/gradio/pull/3960)
+- Format invocations and concatenations were replaced by f-strings where possible by [@akx](https://github.com/akx) in [PR 3984](https://github.com/gradio-app/gradio/pull/3984)
+- Linting rules were made more strict and issues fixed by [@akx](https://github.com/akx) in [PR 3979](https://github.com/gradio-app/gradio/pull/3979).
+
+### Breaking Changes:
+
+- Some re-exports in `gradio.themes` utilities (introduced in 3.24.0) have been eradicated. 
+ By [@akx](https://github.com/akx) in [PR 3958](https://github.com/gradio-app/gradio/pull/3958) + +### Full Changelog: + +- Add DESCRIPTION.md to image_segmentation demo by [@aliabd](https://github.com/aliabd) in [PR 3866](https://github.com/gradio-app/gradio/pull/3866) +- Fix error in running `gr.themes.builder()` by [@deepkyu](https://github.com/deepkyu) in [PR 3869](https://github.com/gradio-app/gradio/pull/3869) +- Fixed a JavaScript TypeError when loading custom JS with `_js` and setting `outputs` to `None` in `gradio.Blocks()` by [@DavG25](https://github.com/DavG25) in [PR 3883](https://github.com/gradio-app/gradio/pull/3883) +- Fixed bg_background_fill theme property to expand to whole background, block_radius to affect form elements as well, and added block_label_shadow theme property by [@aliabid94](https://github.com/aliabid94) in [PR 3590](https://github.com/gradio-app/gradio/pull/3590) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.27.0 + +### New Features: + +###### AnnotatedImage Component + +New AnnotatedImage component allows users to highlight regions of an image, either by providing bounding boxes, or 0-1 pixel masks. This component is useful for tasks such as image segmentation, object detection, and image captioning. + +![AnnotatedImage screenshot](https://user-images.githubusercontent.com/7870876/232142720-86e0020f-beaf-47b9-a843-689c9621f09c.gif) + +Example usage: + +```python +with gr.Blocks() as demo: + img = gr.Image() + img_section = gr.AnnotatedImage() + def mask(img): + top_left_corner = [0, 0, 20, 20] + random_mask = np.random.randint(0, 2, img.shape[:2]) + return (img, [(top_left_corner, "left corner"), (random_mask, "random")]) + img.change(mask, img, img_section) +``` + +See the [image_segmentation demo](https://github.com/gradio-app/gradio/tree/main/demo/image_segmentation) for a full example. 
By [@aliabid94](https://github.com/aliabid94) in [PR 3836](https://github.com/gradio-app/gradio/pull/3836) + +### Bug Fixes: + +No changes to highlight. + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +## 3.26.0 + +### New Features: + +###### `Video` component supports subtitles + +- Allow the video component to accept subtitles as input, by [@tomchang25](https://github.com/tomchang25) in [PR 3673](https://github.com/gradio-app/gradio/pull/3673). To provide subtitles, simply return a tuple consisting of `(path_to_video, path_to_subtitles)` from your function. Both `.srt` and `.vtt` formats are supported: + +```py +with gr.Blocks() as demo: + gr.Video(("video.mp4", "captions.srt")) +``` + +### Bug Fixes: + +- Fix code markdown support in `gr.Chatbot()` component by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3816](https://github.com/gradio-app/gradio/pull/3816) + +### Documentation Changes: + +- Updates the "view API" page in Gradio apps to use the `gradio_client` library by [@aliabd](https://github.com/aliabd) in [PR 3765](https://github.com/gradio-app/gradio/pull/3765) + +- Read more about how to use the `gradio_client` library here: https://gradio.app/getting-started-with-the-python-client/ + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. 
+ +## 3.25.0 + +### New Features: + +- Improve error messages when number of inputs/outputs to event handlers mismatch, by [@space-nuko](https://github.com/space-nuko) in [PR 3519](https://github.com/gradio-app/gradio/pull/3519) + +- Add `select` listener to Images, allowing users to click on any part of an image and get the coordinates of the click by [@aliabid94](https://github.com/aliabid94) in [PR 3786](https://github.com/gradio-app/gradio/pull/3786). + +```python +with gr.Blocks() as demo: + img = gr.Image() + textbox = gr.Textbox() + + def select_handler(img, evt: gr.SelectData): + selected_pixel = img[evt.index[1], evt.index[0]] + return f"Selected pixel: {selected_pixel}" + + img.select(select_handler, img, textbox) +``` + +![Recording 2023-04-08 at 17 44 39](https://user-images.githubusercontent.com/7870876/230748572-90a2a8d5-116d-4769-bb53-5516555fbd0f.gif) + +### Bug Fixes: + +- Increase timeout for sending analytics data by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3647](https://github.com/gradio-app/gradio/pull/3647) +- Fix bug where http token was not accessed over websocket connections by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3735](https://github.com/gradio-app/gradio/pull/3735) +- Add ability to specify `rows`, `columns` and `object-fit` in `style()` for `gr.Gallery()` component by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3586](https://github.com/gradio-app/gradio/pull/3586) +- Fix bug where recording an audio file through the microphone resulted in a corrupted file name by [@abidlabs](https://github.com/abidlabs) in [PR 3770](https://github.com/gradio-app/gradio/pull/3770) +- Added "ssl_verify" to blocks.launch method to allow for use of self-signed certs by [@garrettsutula](https://github.com/garrettsutula) in [PR 3873](https://github.com/gradio-app/gradio/pull/3873) +- Fix bug where iterators where not being reset for processes that terminated early by 
[@freddyaboulton](https://github.com/freddyaboulton) in [PR 3777](https://github.com/gradio-app/gradio/pull/3777) +- Fix bug where the upload button was not properly handling the `file_count='multiple'` case by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3782](https://github.com/gradio-app/gradio/pull/3782) +- Fix bug where use Via API button was giving error by [@Devang-C](https://github.com/Devang-C) in [PR 3783](https://github.com/gradio-app/gradio/pull/3783) + +### Documentation Changes: + +- Fix invalid argument docstrings, by [@akx](https://github.com/akx) in [PR 3740](https://github.com/gradio-app/gradio/pull/3740) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Fixed IPv6 listening to work with bracket [::1] notation, by [@dsully](https://github.com/dsully) in [PR 3695](https://github.com/gradio-app/gradio/pull/3695) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.24.1 + +### New Features: + +- No changes to highlight. + +### Bug Fixes: + +- Fixes Chatbot issue where new lines were being created every time a message was sent back and forth by [@aliabid94](https://github.com/aliabid94) in [PR 3717](https://github.com/gradio-app/gradio/pull/3717). +- Fixes data updating in DataFrame invoking a `select` event once the dataframe has been selected. By [@yiyuezhuo](https://github.com/yiyuezhuo) in [PR 3861](https://github.com/gradio-app/gradio/pull/3861) +- Fixes false positive warning which is due to too strict type checking by [@yiyuezhuo](https://github.com/yiyuezhuo) in [PR 3837](https://github.com/gradio-app/gradio/pull/3837). + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. 
+ +## 3.24.0 + +### New Features: + +- Trigger the release event when Slider number input is released or unfocused by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3589](https://github.com/gradio-app/gradio/pull/3589) +- Created Theme Builder, which allows users to create themes without writing any code, by [@aliabid94](https://github.com/aliabid94) in [PR 3664](https://github.com/gradio-app/gradio/pull/3664). Launch by: + + ```python + import gradio as gr + gr.themes.builder() + ``` + + ![Theme Builder](https://user-images.githubusercontent.com/7870876/228204929-d71cbba5-69c2-45b3-bd20-e3a201d98b12.png) + +- The `Dropdown` component now has a `allow_custom_value` parameter that lets users type in custom values not in the original list of choices. +- The `Colorpicker` component now has a `.blur()` event + +###### Added a download button for videos! 📥 + +![download_video](https://user-images.githubusercontent.com/41651716/227009612-9bc5fb72-2a44-4c55-9b7b-a0fa098e7f25.gif) + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3581](https://github.com/gradio-app/gradio/pull/3581). 
+
+- Trigger the release event when Slider number input is released or unfocused by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3589](https://github.com/gradio-app/gradio/pull/3589)
+
+### Bug Fixes:
+
+- Fixed bug where text for altair plots was not legible in dark mode by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3555](https://github.com/gradio-app/gradio/pull/3555)
+- Fixes `Chatbot` and `Image` components so that files passed during processing are added to a directory where they can be served from, by [@abidlabs](https://github.com/abidlabs) in [PR 3523](https://github.com/gradio-app/gradio/pull/3523)
+- Use Gradio API server to send telemetry using `huggingface_hub` by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3488](https://github.com/gradio-app/gradio/pull/3488)
+- Fixes an issue where if the Blocks scope was not exited, then State could be shared across sessions, by [@abidlabs](https://github.com/abidlabs) in [PR 3600](https://github.com/gradio-app/gradio/pull/3600)
+- Ensures that `gr.load()` loads and applies the upstream theme, by [@abidlabs](https://github.com/abidlabs) in [PR 3641](https://github.com/gradio-app/gradio/pull/3641)
+- Fixed bug where "or" was not being localized in file upload text by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3599](https://github.com/gradio-app/gradio/pull/3599)
+- Fixed bug where chatbot does not autoscroll inside of a tab, row or column by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3637](https://github.com/gradio-app/gradio/pull/3637)
+- Fixed bug where textbox shrinks when `lines` set to larger than 20 by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3637](https://github.com/gradio-app/gradio/pull/3637)
+- Ensure CSS has fully loaded before rendering the application, by [@pngwn](https://github.com/pngwn) in [PR 3573](https://github.com/gradio-app/gradio/pull/3573)
+- Support using an empty list as `gr.Dataframe` value, by 
[@space-nuko](https://github.com/space-nuko) in [PR 3646](https://github.com/gradio-app/gradio/pull/3646) +- Fixed `gr.Image` not filling the entire element size, by [@space-nuko](https://github.com/space-nuko) in [PR 3649](https://github.com/gradio-app/gradio/pull/3649) +- Make `gr.Code` support the `lines` property, by [@space-nuko](https://github.com/space-nuko) in [PR 3651](https://github.com/gradio-app/gradio/pull/3651) +- Fixes certain `_js` return values being double wrapped in an array, by [@space-nuko](https://github.com/space-nuko) in [PR 3594](https://github.com/gradio-app/gradio/pull/3594) +- Correct the documentation of `gr.File` component to state that its preprocessing method converts the uploaded file to a temporary file, by @RussellLuo in [PR 3660](https://github.com/gradio-app/gradio/pull/3660) +- Fixed bug in Serializer ValueError text by [@osanseviero](https://github.com/osanseviero) in [PR 3669](https://github.com/gradio-app/gradio/pull/3669) +- Fix default parameter argument and `gr.Progress` used in same function, by [@space-nuko](https://github.com/space-nuko) in [PR 3671](https://github.com/gradio-app/gradio/pull/3671) +- Hide `Remove All` button in `gr.Dropdown` single-select mode by [@space-nuko](https://github.com/space-nuko) in [PR 3678](https://github.com/gradio-app/gradio/pull/3678) +- Fix broken spaces in docs by [@aliabd](https://github.com/aliabd) in [PR 3698](https://github.com/gradio-app/gradio/pull/3698) +- Fix items in `gr.Dropdown` besides the selected item receiving a checkmark, by [@space-nuko](https://github.com/space-nuko) in [PR 3644](https://github.com/gradio-app/gradio/pull/3644) +- Fix several `gr.Dropdown` issues and improve usability, by [@space-nuko](https://github.com/space-nuko) in [PR 3705](https://github.com/gradio-app/gradio/pull/3705) + +### Documentation Changes: + +- Makes some fixes to the Theme Guide related to naming of variables, by [@abidlabs](https://github.com/abidlabs) in [PR 
3561](https://github.com/gradio-app/gradio/pull/3561) +- Documented `HuggingFaceDatasetJSONSaver` by [@osanseviero](https://github.com/osanseviero) in [PR 3604](https://github.com/gradio-app/gradio/pull/3604) +- Makes some additions to documentation of `Audio` and `State` components, and fixes the `pictionary` demo by [@abidlabs](https://github.com/abidlabs) in [PR 3611](https://github.com/gradio-app/gradio/pull/3611) +- Fix outdated sharing your app guide by [@aliabd](https://github.com/aliabd) in [PR 3699](https://github.com/gradio-app/gradio/pull/3699) + +### Testing and Infrastructure Changes: + +- Removed heavily-mocked tests related to comet_ml, wandb, and mlflow as they added a significant amount of test dependencies that prevented installation of test dependencies on Windows environments. By [@abidlabs](https://github.com/abidlabs) in [PR 3608](https://github.com/gradio-app/gradio/pull/3608) +- Added Windows continuous integration, by [@space-nuko](https://github.com/space-nuko) in [PR 3628](https://github.com/gradio-app/gradio/pull/3628) +- Switched linting from flake8 + isort to `ruff`, by [@akx](https://github.com/akx) in [PR 3710](https://github.com/gradio-app/gradio/pull/3710) + +### Breaking Changes: + +No changes to highlight. 
+
+### Full Changelog:
+
+- Mobile responsive iframes in themes guide by [@aliabd](https://github.com/aliabd) in [PR 3562](https://github.com/gradio-app/gradio/pull/3562)
+- Remove extra $demo from theme guide by [@aliabd](https://github.com/aliabd) in [PR 3563](https://github.com/gradio-app/gradio/pull/3563)
+- Set the theme name to be the upstream repo name when loading from the hub by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3595](https://github.com/gradio-app/gradio/pull/3595)
+- Copy everything in website Dockerfile, fix build issues by [@aliabd](https://github.com/aliabd) in [PR 3659](https://github.com/gradio-app/gradio/pull/3659)
+- Raise error when an event is queued but the queue is not configured by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3640](https://github.com/gradio-app/gradio/pull/3640)
+- Allows users to pass in a string name for a built-in theme, by [@abidlabs](https://github.com/abidlabs) in [PR 3641](https://github.com/gradio-app/gradio/pull/3641)
+- Added `orig_name` to Video output in the backend so that the front end can set the right name for downloaded video files by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3700](https://github.com/gradio-app/gradio/pull/3700)
+
+### Contributors Shoutout:
+
+No changes to highlight.
+
+## 3.23.0
+
+### New Features:
+
+###### Theme Sharing!
+
+Once you have created a theme, you can upload it to the HuggingFace Hub to let others view it, use it, and build off of it! You can also download, reuse, and remix other peoples' themes. See https://gradio.app/theming-guide/ for more details.
+
+By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3428](https://github.com/gradio-app/gradio/pull/3428)
+
+### Bug Fixes:
+
+- Removes leading spaces from all lines of code uniformly in the `gr.Code()` component. 
By [@abidlabs](https://github.com/abidlabs) in [PR 3556](https://github.com/gradio-app/gradio/pull/3556) +- Fixed broken login page, by [@aliabid94](https://github.com/aliabid94) in [PR 3529](https://github.com/gradio-app/gradio/pull/3529) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Fix rendering of dropdowns to take more space, and related bugs, by [@aliabid94](https://github.com/aliabid94) in [PR 3549](https://github.com/gradio-app/gradio/pull/3549) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.22.1 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Restore label bars by [@aliabid94](https://github.com/aliabid94) in [PR 3507](https://github.com/gradio-app/gradio/pull/3507) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +## 3.22.0 + +### New Features: + +###### Official Theme release + +Gradio now supports a new theme system, which allows you to customize the look and feel of your app. You can now use the `theme=` kwarg to pass in a prebuilt theme, or customize your own! See https://gradio.app/theming-guide/ for more details. By [@aliabid94](https://github.com/aliabid94) in [PR 3470](https://github.com/gradio-app/gradio/pull/3470) and [PR 3497](https://github.com/gradio-app/gradio/pull/3497) + +###### `elem_classes` + +Add keyword argument `elem_classes` to Components to control class names of components, in the same manner as existing `elem_id`. 
+By [@aliabid94](https://github.com/aliabid94) in [PR 3466](https://github.com/gradio-app/gradio/pull/3466) + +### Bug Fixes: + +- Fixes the File.upload() event trigger which broke as part of the change in how we uploaded files by [@abidlabs](https://github.com/abidlabs) in [PR 3462](https://github.com/gradio-app/gradio/pull/3462) +- Fixed issue with `gr.Request` object failing to handle dictionaries when nested keys couldn't be converted to variable names [#3454](https://github.com/gradio-app/gradio/issues/3454) by [@radames](https://github.com/radames) in [PR 3459](https://github.com/gradio-app/gradio/pull/3459) +- Fixed bug where css and client api was not working properly when mounted in a subpath by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3482](https://github.com/gradio-app/gradio/pull/3482) + +### Documentation Changes: + +- Document gr.Error in the docs by [@aliabd](https://github.com/aliabd) in [PR 3465](https://github.com/gradio-app/gradio/pull/3465) + +### Testing and Infrastructure Changes: + +- Pinned `pyright==1.1.298` for stability by [@abidlabs](https://github.com/abidlabs) in [PR 3475](https://github.com/gradio-app/gradio/pull/3475) +- Removed `IOComponent.add_interactive_to_config()` by [@space-nuko](https://github.com/space-nuko) in [PR 3476](https://github.com/gradio-app/gradio/pull/3476) +- Removed `IOComponent.generate_sample()` by [@space-nuko](https://github.com/space-nuko) in [PR 3475](https://github.com/gradio-app/gradio/pull/3483) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Revert primary button background color in dark mode by [@aliabid94](https://github.com/aliabid94) in [PR 3468](https://github.com/gradio-app/gradio/pull/3468) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.21.0 + +### New Features: + +###### Theme Sharing 🎨 🤝 + +You can now share your gradio themes with the world! 
+ +After creating a theme, you can upload it to the HuggingFace Hub to let others view it, use it, and build off of it! + +###### Uploading + +There are two ways to upload a theme, via the theme class instance or the command line. + +1. Via the class instance + +```python +my_theme.push_to_hub(repo_name="my_theme", + version="0.2.0", + hf_token="...") +``` + +2. Via the command line + +First save the theme to disk + +```python +my_theme.dump(filename="my_theme.json") +``` + +Then use the `upload_theme` command: + +```bash +upload_theme\ +"my_theme.json"\ +"my_theme"\ +"0.2.0"\ +"" +``` + +The `version` must be a valid [semantic version](https://www.geeksforgeeks.org/introduction-semantic-versioning/) string. + +This creates a space on the huggingface hub to host the theme files and show potential users a preview of your theme. + +An example theme space is here: https://huggingface.co/spaces/freddyaboulton/dracula_revamped + +###### Downloading + +To use a theme from the hub, use the `from_hub` method on the `ThemeClass` and pass it to your app: + +```python +my_theme = gr.Theme.from_hub("freddyaboulton/my_theme") + +with gr.Blocks(theme=my_theme) as demo: + .... +``` + +You can also pass the theme string directly to `Blocks` or `Interface` (`gr.Blocks(theme="freddyaboulton/my_theme")`) + +You can pin your app to an upstream theme version by using semantic versioning expressions. + +For example, the following would ensure the theme we load from the `my_theme` repo was between versions `0.1.0` and `0.2.0`: + +```python +with gr.Blocks(theme="freddyaboulton/my_theme@>=0.1.0,<0.2.0") as demo: + .... 
+``` + +by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3428](https://github.com/gradio-app/gradio/pull/3428) + +###### Code component 🦾 + +New code component allows you to enter, edit and display code with full syntax highlighting by [@pngwn](https://github.com/pngwn) in [PR 3421](https://github.com/gradio-app/gradio/pull/3421) + +###### The `Chatbot` component now supports audio, video, and images + +The `Chatbot` component now supports audio, video, and images with a simple syntax: simply +pass in a tuple with the URL or filepath (the second optional element of the tuple is alt text), and the image/audio/video will be displayed: + +```python +gr.Chatbot([ + (("driving.mp4",), "cool video"), + (("cantina.wav",), "cool audio"), + (("lion.jpg", "A lion"), "cool pic"), +]).style(height=800) +``` + +image + +Note: images were previously supported via Markdown syntax and that is still supported for backwards compatibility. By [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3413](https://github.com/gradio-app/gradio/pull/3413) + +- Allow consecutive function triggers with `.then` and `.success` by [@aliabid94](https://github.com/aliabid94) in [PR 3430](https://github.com/gradio-app/gradio/pull/3430) + +- New code component allows you to enter, edit and display code with full syntax highlighting by [@pngwn](https://github.com/pngwn) in [PR 3421](https://github.com/gradio-app/gradio/pull/3421) + +![](https://user-images.githubusercontent.com/12937446/224116643-5cfb94b3-93ce-43ee-bb7b-c25c3b66e0a1.png) + +- Added the `.select()` event listener, which also includes event data that can be passed as an argument to a function with type hint `gr.SelectData`. The following components support the `.select()` event listener: Chatbot, CheckboxGroup, Dataframe, Dropdown, File, Gallery, HighlightedText, Label, Radio, TabItem, Tab, Textbox. 
Example usage: + +```python +import gradio as gr + +with gr.Blocks() as demo: + gallery = gr.Gallery(["images/1.jpg", "images/2.jpg", "images/3.jpg"]) + selected_index = gr.Textbox() + + def on_select(evt: gr.SelectData): + return evt.index + + gallery.select(on_select, None, selected_index) +``` + +By [@aliabid94](https://github.com/aliabid94) in [PR 3399](https://github.com/gradio-app/gradio/pull/3399) + +- The `Textbox` component now includes a copy button by [@abidlabs](https://github.com/abidlabs) in [PR 3452](https://github.com/gradio-app/gradio/pull/3452) + +### Bug Fixes: + +- Use `huggingface_hub` to send telemetry on `interface` and `blocks`; eventually to replace segment by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3342](https://github.com/gradio-app/gradio/pull/3342) +- Ensure load events created by components (randomize for slider, callable values) are never queued unless every is passed by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3391](https://github.com/gradio-app/gradio/pull/3391) +- Prevent in-place updates of `generic_update` by shallow copying by [@gitgithan](https://github.com/gitgithan) in [PR 3405](https://github.com/gradio-app/gradio/pull/3405) to fix [#3282](https://github.com/gradio-app/gradio/issues/3282) +- Fix bug caused by not importing `BlockContext` in `utils.py` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3424](https://github.com/gradio-app/gradio/pull/3424) +- Ensure dropdown does not highlight partial matches by [@pngwn](https://github.com/pngwn) in [PR 3421](https://github.com/gradio-app/gradio/pull/3421) +- Fix mic button display by [@aliabid94](https://github.com/aliabid94) in [PR 3456](https://github.com/gradio-app/gradio/pull/3456) + +### Documentation Changes: + +- Added a section on security and access when sharing Gradio apps by [@abidlabs](https://github.com/abidlabs) in [PR 3408](https://github.com/gradio-app/gradio/pull/3408) +- Add Chinese README by 
[@uanu2002](https://github.com/uanu2002) in [PR 3394](https://github.com/gradio-app/gradio/pull/3394) +- Adds documentation for web components by [@abidlabs](https://github.com/abidlabs) in [PR 3407](https://github.com/gradio-app/gradio/pull/3407) +- Fixed link in Chinese readme by [@eltociear](https://github.com/eltociear) in [PR 3417](https://github.com/gradio-app/gradio/pull/3417) +- Document Blocks methods by [@aliabd](https://github.com/aliabd) in [PR 3427](https://github.com/gradio-app/gradio/pull/3427) +- Fixed bug where event handlers were not showing up in documentation by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3434](https://github.com/gradio-app/gradio/pull/3434) + +### Testing and Infrastructure Changes: + +- Fixes tests that were failing locally but passing on CI by [@abidlabs](https://github.com/abidlabs) in [PR 3411](https://github.com/gradio-app/gradio/pull/3411) +- Remove codecov from the repo by [@aliabd](https://github.com/aliabd) in [PR 3415](https://github.com/gradio-app/gradio/pull/3415) + +### Breaking Changes: + +No changes to highlight. 
+ +### Full Changelog: + +- Prevent in-place updates of `generic_update` by shallow copying by [@gitgithan](https://github.com/gitgithan) in [PR 3405](https://github.com/gradio-app/gradio/pull/3405) to fix [#3282](https://github.com/gradio-app/gradio/issues/3282) +- Persist file names of files uploaded through any Gradio component by [@abidlabs](https://github.com/abidlabs) in [PR 3412](https://github.com/gradio-app/gradio/pull/3412) +- Fix markdown embedded component in docs by [@aliabd](https://github.com/aliabd) in [PR 3410](https://github.com/gradio-app/gradio/pull/3410) +- Clean up event listeners code by [@aliabid94](https://github.com/aliabid94) in [PR 3420](https://github.com/gradio-app/gradio/pull/3420) +- Fix css issue with spaces logo by [@aliabd](https://github.com/aliabd) in [PR 3422](https://github.com/gradio-app/gradio/pull/3422) +- Makes a few fixes to the `JSON` component (show_label parameter, icons) in [@abidlabs](https://github.com/abidlabs) in [PR 3451](https://github.com/gradio-app/gradio/pull/3451) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.20.1 + +### New Features: + +- Add `height` kwarg to style in `gr.Chatbot()` component by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3369](https://github.com/gradio-app/gradio/pull/3369) + +```python +chatbot = gr.Chatbot().style(height=500) +``` + +### Bug Fixes: + +- Ensure uploaded images are always shown in the sketch tool by [@pngwn](https://github.com/pngwn) in [PR 3386](https://github.com/gradio-app/gradio/pull/3386) +- Fixes bug where when if fn is a non-static class member, then self should be ignored as the first param of the fn by [@or25](https://github.com/or25) in [PR #3227](https://github.com/gradio-app/gradio/pull/3227) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. 
+ +### Contributors Shoutout: + +No changes to highlight. + +## 3.20.0 + +### New Features: + +###### Release event for Slider + +Now you can trigger your python function to run when the slider is released as opposed to every slider change value! + +Simply use the `release` method on the slider + +```python +slider.release(function, inputs=[...], outputs=[...], api_name="predict") +``` + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3353](https://github.com/gradio-app/gradio/pull/3353) + +###### Dropdown Component Updates + +The standard dropdown component now supports searching for choices. Also when `multiselect` is `True`, you can specify `max_choices` to set the maximum number of choices you want the user to be able to select from the dropdown component. + +```python +gr.Dropdown(label="Choose your favorite colors", choices=["red", "blue", "green", "yellow", "orange"], multiselect=True, max_choices=2) +``` + +by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3211](https://github.com/gradio-app/gradio/pull/3211) + +###### Download button for images 🖼️ + +Output images will now automatically have a download button displayed to make it easier to save and share +the results of Machine Learning art models. 
+ +![download_sketch](https://user-images.githubusercontent.com/41651716/221025113-e693bf41-eabd-42b3-a4f2-26f2708d98fe.gif) + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3297](https://github.com/gradio-app/gradio/pull/3297) + +- Updated image upload component to accept all image formats, including lossless formats like .webp by [@fienestar](https://github.com/fienestar) in [PR 3225](https://github.com/gradio-app/gradio/pull/3225) +- Adds a disabled mode to the `gr.Button` component by setting `interactive=False` by [@abidlabs](https://github.com/abidlabs) in [PR 3266](https://github.com/gradio-app/gradio/pull/3266) and [PR 3288](https://github.com/gradio-app/gradio/pull/3288) +- Adds visual feedback to the when the Flag button is clicked, by [@abidlabs](https://github.com/abidlabs) in [PR 3289](https://github.com/gradio-app/gradio/pull/3289) +- Adds ability to set `flagging_options` display text and saved flag separately by [@abidlabs](https://github.com/abidlabs) in [PR 3289](https://github.com/gradio-app/gradio/pull/3289) +- Allow the setting of `brush_radius` for the `Image` component both as a default and via `Image.update()` by [@pngwn](https://github.com/pngwn) in [PR 3277](https://github.com/gradio-app/gradio/pull/3277) +- Added `info=` argument to form components to enable extra context provided to users, by [@aliabid94](https://github.com/aliabid94) in [PR 3291](https://github.com/gradio-app/gradio/pull/3291) +- Allow developers to access the username of a logged-in user from the `gr.Request()` object using the `.username` attribute by [@abidlabs](https://github.com/abidlabs) in [PR 3296](https://github.com/gradio-app/gradio/pull/3296) +- Add `preview` option to `Gallery.style` that launches the gallery in preview mode when first loaded by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3345](https://github.com/gradio-app/gradio/pull/3345) + +### Bug Fixes: + +- Ensure `mirror_webcam` is always respected by 
[@pngwn](https://github.com/pngwn) in [PR 3245](https://github.com/gradio-app/gradio/pull/3245) +- Fix issue where updated markdown links were not being opened in a new tab by [@gante](https://github.com/gante) in [PR 3236](https://github.com/gradio-app/gradio/pull/3236) +- API Docs Fixes by [@aliabd](https://github.com/aliabd) in [PR 3287](https://github.com/gradio-app/gradio/pull/3287) +- Added a timeout to queue messages as some demos were experiencing infinitely growing queues from active jobs waiting forever for clients to respond by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3196](https://github.com/gradio-app/gradio/pull/3196) +- Fixes the height of rendered LaTeX images so that they match the height of surrounding text by [@abidlabs](https://github.com/abidlabs) in [PR 3258](https://github.com/gradio-app/gradio/pull/3258) and in [PR 3276](https://github.com/gradio-app/gradio/pull/3276) +- Fix bug where matplotlib images where always too small on the front end by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3274](https://github.com/gradio-app/gradio/pull/3274) +- Remove embed's `initial_height` when loading is complete so the embed finds its natural height once it is loaded [@pngwn](https://github.com/pngwn) in [PR 3292](https://github.com/gradio-app/gradio/pull/3292) +- Prevent Sketch from crashing when a default image is provided by [@pngwn](https://github.com/pngwn) in [PR 3277](https://github.com/gradio-app/gradio/pull/3277) +- Respect the `shape` argument on the front end when creating Image Sketches by [@pngwn](https://github.com/pngwn) in [PR 3277](https://github.com/gradio-app/gradio/pull/3277) +- Fix infinite loop caused by setting `Dropdown's` value to be `[]` and adding a change event on the dropdown by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3295](https://github.com/gradio-app/gradio/pull/3295) +- Fix change event listed twice in image docs by [@aliabd](https://github.com/aliabd) in [PR 
3318](https://github.com/gradio-app/gradio/pull/3318) +- Fix bug that cause UI to be vertically centered at all times by [@pngwn](https://github.com/pngwn) in [PR 3336](https://github.com/gradio-app/gradio/pull/3336) +- Fix bug where `height` set in `Gallery.style` was not respected by the front-end by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3343](https://github.com/gradio-app/gradio/pull/3343) +- Ensure markdown lists are rendered correctly by [@pngwn](https://github.com/pngwn) in [PR 3341](https://github.com/gradio-app/gradio/pull/3341) +- Ensure that the initial empty value for `gr.Dropdown(Multiselect=True)` is an empty list and the initial value for `gr.Dropdown(Multiselect=False)` is an empty string by [@pngwn](https://github.com/pngwn) in [PR 3338](https://github.com/gradio-app/gradio/pull/3338) +- Ensure uploaded images respect the shape property when the canvas is also enabled by [@pngwn](https://github.com/pngwn) in [PR 3351](https://github.com/gradio-app/gradio/pull/3351) +- Ensure that Google Analytics works correctly when gradio apps are created with `analytics_enabled=True` by [@abidlabs](https://github.com/abidlabs) in [PR 3349](https://github.com/gradio-app/gradio/pull/3349) +- Fix bug where files were being re-uploaded after updates by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3375](https://github.com/gradio-app/gradio/pull/3375) +- Fix error when using backen_fn and custom js at the same time by [@jialeicui](https://github.com/jialeicui) in [PR 3358](https://github.com/gradio-app/gradio/pull/3358) +- Support new embeds for huggingface spaces subdomains by [@pngwn](https://github.com/pngwn) in [PR 3367](https://github.com/gradio-app/gradio/pull/3367) + +### Documentation Changes: + +- Added the `types` field to the dependency field in the config by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3315](https://github.com/gradio-app/gradio/pull/3315) +- Gradio Status Page by 
[@aliabd](https://github.com/aliabd) in [PR 3331](https://github.com/gradio-app/gradio/pull/3331) +- Adds a Guide on setting up a dashboard from Supabase data using the `gr.BarPlot` + component by [@abidlabs](https://github.com/abidlabs) in [PR 3275](https://github.com/gradio-app/gradio/pull/3275) + +### Testing and Infrastructure Changes: + +- Adds a script to benchmark the performance of the queue and adds some instructions on how to use it. By [@freddyaboulton](https://github.com/freddyaboulton) and [@abidlabs](https://github.com/abidlabs) in [PR 3272](https://github.com/gradio-app/gradio/pull/3272) +- Flaky python tests no longer cancel non-flaky tests by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3344](https://github.com/gradio-app/gradio/pull/3344) + +### Breaking Changes: + +- Chatbot bubble colors can no longer be set by `chatbot.style(color_map=)` by [@aliabid94] in [PR 3370](https://github.com/gradio-app/gradio/pull/3370) + +### Full Changelog: + +- Fixed comment typo in components.py by [@eltociear](https://github.com/eltociear) in [PR 3235](https://github.com/gradio-app/gradio/pull/3235) +- Cleaned up chatbot ui look and feel by [@aliabid94] in [PR 3370](https://github.com/gradio-app/gradio/pull/3370) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.19.1 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- UI fixes including footer and API docs by [@aliabid94](https://github.com/aliabid94) in [PR 3242](https://github.com/gradio-app/gradio/pull/3242) +- Updated image upload component to accept all image formats, including lossless formats like .webp by [@fienestar](https://github.com/fienestar) in [PR 3225](https://github.com/gradio-app/gradio/pull/3225) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. 
+ +### Full Changelog: + +- Added backend support for themes by [@aliabid94](https://github.com/aliabid94) in [PR 2931](https://github.com/gradio-app/gradio/pull/2931) +- Added support for button sizes "lg" (default) and "sm". + +### Contributors Shoutout: + +No changes to highlight. + +## 3.19.0 + +### New Features: + +###### Improved embedding experience + +When embedding a spaces-hosted gradio app as a web component, you now get an improved UI linking back to the original space, better error handling and more intelligent load performance. No changes are required to your code to benefit from this enhanced experience; simply upgrade your gradio SDK to the latest version. + +![](https://user-images.githubusercontent.com/12937446/219653294-86937632-72c1-4e93-a77c-af705d49382a.png) + +This behaviour is configurable. You can disable the info panel at the bottom by passing `info="false"`. You can disable the container entirely by passing `container="false"`. + +Error statuses are reported in the UI with an easy way for end-users to report problems to the original space author via the community tab of that Hugginface space: + +![](https://user-images.githubusercontent.com/12937446/219655499-88019443-d694-44e7-9e6d-242e19d10a5c.png) + +By default, gradio apps are lazy loaded, vastly improving performance when there are several demos on the page. Metadata is loaded ahead of time, but the space will only be loaded and rendered when it is in view. + +This behaviour is configurable. You can pass `eager="true"` to load and render the space regardless of whether or not it is currently on the screen. + +by [@pngwn](https://github.com/pngwn) in [PR 3205](https://github.com/gradio-app/gradio/pull/3205) + +###### New `gr.BarPlot` component! 📊 + +Create interactive bar plots from a high-level interface with `gr.BarPlot`. +No need to remember matplotlib syntax anymore! 
+ +Example usage: + +```python +import gradio as gr +import pandas as pd + +simple = pd.DataFrame({ + 'a': ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I'], + 'b': [28, 55, 43, 91, 81, 53, 19, 87, 52] +}) + +with gr.Blocks() as demo: + gr.BarPlot( + simple, + x="a", + y="b", + title="Simple Bar Plot with made up data", + tooltip=['a', 'b'], + ) + +demo.launch() +``` + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3157](https://github.com/gradio-app/gradio/pull/3157) + +###### Bokeh plots are back! 🌠 + +Fixed a bug that prevented bokeh plots from being displayed on the front end and extended support for both 2.x and 3.x versions of bokeh! + +![image](https://user-images.githubusercontent.com/41651716/219468324-0d82e07f-8fb4-4ff9-b40c-8250b29e45f7.png) + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3212](https://github.com/gradio-app/gradio/pull/3212) + +### Bug Fixes: + +- Adds ability to add a single message from the bot or user side. Ex: specify `None` as the second value in the tuple, to add a single message in the chatbot from the "bot" side. + +```python +gr.Chatbot([("Hi, I'm DialoGPT. 
Try asking me a question.", None)]) +``` + +By [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3165](https://github.com/gradio-app/gradio/pull/3165) + +- Fixes `gr.utils.delete_none` to only remove props whose values are `None` from the config by [@abidlabs](https://github.com/abidlabs) in [PR 3188](https://github.com/gradio-app/gradio/pull/3188) +- Fix bug where embedded demos were not loading files properly by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3177](https://github.com/gradio-app/gradio/pull/3177) +- The `change` event is now triggered when users click the 'Clear All' button of the multiselect DropDown component by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3195](https://github.com/gradio-app/gradio/pull/3195) +- Stops File component from freezing when a large file is uploaded by [@aliabid94](https://github.com/aliabid94) in [PR 3191](https://github.com/gradio-app/gradio/pull/3191) +- Support Chinese pinyin in Dataframe by [@aliabid94](https://github.com/aliabid94) in [PR 3206](https://github.com/gradio-app/gradio/pull/3206) +- The `clear` event is now triggered when images are cleared by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3218](https://github.com/gradio-app/gradio/pull/3218) +- Fix bug where auth cookies were not sent when connecting to an app via http by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3223](https://github.com/gradio-app/gradio/pull/3223) +- Ensure LaTeX CSS is always applied in light and dark mode by [@pngwn](https://github.com/pngwn) in [PR 3233](https://github.com/gradio-app/gradio/pull/3233) + +### Documentation Changes: + +- Sort components in docs by alphabetic order by [@aliabd](https://github.com/aliabd) in [PR 3152](https://github.com/gradio-app/gradio/pull/3152) +- Changes to W&B guide by [@scottire](https://github.com/scottire) in [PR 3153](https://github.com/gradio-app/gradio/pull/3153) +- Keep pnginfo metadata for gallery by 
[@wfng92](https://github.com/wfng92) in [PR 3150](https://github.com/gradio-app/gradio/pull/3150) +- Add a section on how to run a Gradio app locally [@osanseviero](https://github.com/osanseviero) in [PR 3170](https://github.com/gradio-app/gradio/pull/3170) +- Fixed typos in gradio events function documentation by [@vidalmaxime](https://github.com/vidalmaxime) in [PR 3168](https://github.com/gradio-app/gradio/pull/3168) +- Added an example using Gradio's batch mode with the diffusers library by [@abidlabs](https://github.com/abidlabs) in [PR 3224](https://github.com/gradio-app/gradio/pull/3224) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Fix demos page css and add close demos button by [@aliabd](https://github.com/aliabd) in [PR 3151](https://github.com/gradio-app/gradio/pull/3151) +- Caches temp files from base64 input data by giving them a deterministic path based on the contents of data by [@abidlabs](https://github.com/abidlabs) in [PR 3197](https://github.com/gradio-app/gradio/pull/3197) +- Better warnings (when there is a mismatch between the number of output components and values returned by a function, or when the `File` component or `UploadButton` component includes a `file_types` parameter along with `file_count=="dir"`) by [@abidlabs](https://github.com/abidlabs) in [PR 3194](https://github.com/gradio-app/gradio/pull/3194) +- Raises a `gr.Error` instead of a regular Python error when you use `gr.Interface.load()` to load a model and there's an error querying the HF API by [@abidlabs](https://github.com/abidlabs) in [PR 3194](https://github.com/gradio-app/gradio/pull/3194) +- Fixed gradio share links so that they are persistent and do not reset if network + connection is disrupted by by [XciD](https://github.com/XciD), [Wauplin](https://github.com/Wauplin), and [@abidlabs](https://github.com/abidlabs) in [PR 
3149](https://github.com/gradio-app/gradio/pull/3149) and a follow-up to allow it to work for users upgrading from a previous Gradio version in [PR 3221](https://github.com/gradio-app/gradio/pull/3221) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.18.0 + +### New Features: + +###### Revamped Stop Button for Interfaces 🛑 + +If your Interface function is a generator, there used to be a separate `Stop` button displayed next +to the `Submit` button. + +We've revamped the `Submit` button so that it turns into a `Stop` button during the generation process. +Clicking on the `Stop` button will cancel the generation and turn it back to a `Submit` button. +The `Stop` button will automatically turn back to a `Submit` button at the end of the generation if you don't use it! + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3124](https://github.com/gradio-app/gradio/pull/3124) + +###### Queue now works with reload mode! + +You can now call `queue` on your `demo` outside of the `if __name__ == "__main__"` block and +run the script in reload mode with the `gradio` command. + +Any changes to the `app.py` file will be reflected in the webpage automatically and the queue will work +properly! + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3089](https://github.com/gradio-app/gradio/pull/3089) + +###### Allow serving files from additional directories + +```python +demo = gr.Interface(...) +demo.launch( + file_directories=["/var/lib/demo/path/to/resources"] +) +``` + +By [@maxaudron](https://github.com/maxaudron) in [PR 3075](https://github.com/gradio-app/gradio/pull/3075) + +### Bug Fixes: + +- Fixes URL resolution on Windows by [@abidlabs](https://github.com/abidlabs) in [PR 3108](https://github.com/gradio-app/gradio/pull/3108) +- Example caching now works with components without a label attribute (e.g. 
`Column`) by [@abidlabs](https://github.com/abidlabs) in [PR 3123](https://github.com/gradio-app/gradio/pull/3123) +- Ensure the Video component correctly resets the UI state when a new video source is loaded and reduce choppiness of UI by [@pngwn](https://github.com/pngwn) in [PR 3117](https://github.com/gradio-app/gradio/pull/3117) +- Fixes loading private Spaces by [@abidlabs](https://github.com/abidlabs) in [PR 3068](https://github.com/gradio-app/gradio/pull/3068) +- Added a warning when attempting to launch an `Interface` via the `%%blocks` jupyter notebook magic command by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3126](https://github.com/gradio-app/gradio/pull/3126) +- Fixes bug where interactive output image cannot be set when in edit mode by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3135](https://github.com/gradio-app/gradio/pull/3135) +- A share link will automatically be created when running on Sagemaker notebooks so that the front-end is properly displayed by [@abidlabs](https://github.com/abidlabs) in [PR 3137](https://github.com/gradio-app/gradio/pull/3137) +- Fixes a few dropdown component issues; hide checkmark next to options as expected, and keyboard hover is visible by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3145](https://github.com/gradio-app/gradio/pull/3145) +- Fixed bug where example pagination buttons were not visible in dark mode or displayed under the examples table. By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3144](https://github.com/gradio-app/gradio/pull/3144) +- Fixed bug where the font color of axis labels and titles for native plots did not respond to dark mode preferences. 
By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3146](https://github.com/gradio-app/gradio/pull/3146) + +### Documentation Changes: + +- Added a guide on the 4 kinds of Gradio Interfaces by [@yvrjsharma](https://github.com/yvrjsharma) and [@abidlabs](https://github.com/abidlabs) in [PR 3003](https://github.com/gradio-app/gradio/pull/3003) +- Explained that the parameters in `launch` will not be respected when using reload mode, e.g. `gradio` command by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3089](https://github.com/gradio-app/gradio/pull/3089) +- Added a demo to show how to set up variable numbers of outputs in Gradio by [@abidlabs](https://github.com/abidlabs) in [PR 3127](https://github.com/gradio-app/gradio/pull/3127) +- Updated docs to reflect that the `equal_height` parameter should be passed to the `.style()` method of `gr.Row()` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3125](https://github.com/gradio-app/gradio/pull/3125) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Changed URL of final image for `fake_diffusion` demos by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3120](https://github.com/gradio-app/gradio/pull/3120) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.17.1 + +### New Features: + +###### iOS image rotation fixed 🔄 + +Previously photos uploaded via iOS would be rotated after processing. 
This has been fixed by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3091](https://github.com/gradio-app/gradio/pull/3091) + +######### Before + +![image](https://user-images.githubusercontent.com/41651716/215846507-a36e9d05-1ac2-4867-8ab3-ce045a9415d9.png) + +######### After + +![image](https://user-images.githubusercontent.com/41651716/215846554-e41773ed-70f0-491a-9952-6a18babf91ef.png) + +###### Run on Kaggle kernels 🧪 + +A share link will automatically be created when running on Kaggle kernels (notebooks) so that the front-end is properly displayed. + +![image](https://user-images.githubusercontent.com/41651716/216104254-2cf55599-449c-436c-b57e-40f6a83f9eee.png) + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3101](https://github.com/gradio-app/gradio/pull/3101) + +### Bug Fixes: + +- Fix bug where examples were not rendered correctly for demos created with Blocks api that had multiple input components by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3090](https://github.com/gradio-app/gradio/pull/3090) +- Fix change event listener for JSON, HighlightedText, Chatbot by [@aliabid94](https://github.com/aliabid94) in [PR 3095](https://github.com/gradio-app/gradio/pull/3095) +- Fixes bug where video and file change event not working [@tomchang25](https://github.com/tomchang25) in [PR 3098](https://github.com/gradio-app/gradio/pull/3098) +- Fixes bug where static_video play and pause event not working [@tomchang25](https://github.com/tomchang25) in [PR 3098](https://github.com/gradio-app/gradio/pull/3098) +- Fixed `Gallery.style(grid=...)` by [@aliabd](https://github.com/aliabd) in [PR 3107](https://github.com/gradio-app/gradio/pull/3107) + +### Documentation Changes: + +- Update chatbot guide to include blocks demo and markdown support section by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3023](https://github.com/gradio-app/gradio/pull/3023) + +* Fix a broken link in the Quick Start guide, by 
[@cakiki](https://github.com/cakiki) in [PR 3109](https://github.com/gradio-app/gradio/pull/3109) +* Better docs navigation on mobile by [@aliabd](https://github.com/aliabd) in [PR 3112](https://github.com/gradio-app/gradio/pull/3112) +* Add a guide on using Gradio with [Comet](https://comet.com/), by [@DN6](https://github.com/DN6/) in [PR 3058](https://github.com/gradio-app/gradio/pull/3058) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Set minimum `markdown-it-py` version to `2.0.0` so that the dollar math plugin is compatible by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3102](https://github.com/gradio-app/gradio/pull/3102) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.17.0 + +### New Features: + +###### Extended support for Interface.load! 🏗️ + +You can now load `image-to-text` and `conversational` pipelines from the hub! + +###### Image-to-text Demo + +```python +io = gr.Interface.load("models/nlpconnect/vit-gpt2-image-captioning", + api_key="") +io.launch() +``` + +image + +###### conversational Demo + +```python +chatbot = gr.Interface.load("models/microsoft/DialoGPT-medium", + api_key="") +chatbot.launch() +``` + +![chatbot_load](https://user-images.githubusercontent.com/41651716/213260220-3eaa25b7-a38b-48c6-adeb-2718bdf297a2.gif) + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3011](https://github.com/gradio-app/gradio/pull/3011) + +###### Download Button added to Model3D Output Component 📥 + +No need for an additional file output component to enable model3d file downloads anymore. We now added a download button to the model3d component itself. + +Screenshot 2023-01-18 at 3 52 45 PM + +By [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3014](https://github.com/gradio-app/gradio/pull/3014) + +###### Fixing Auth on Spaces 🔑 + +Authentication on spaces works now! 
Third party cookies must be enabled on your browser to be able +to log in. Some browsers disable third party cookies by default (Safari, Chrome Incognito). + +![auth_spaces](https://user-images.githubusercontent.com/41651716/215528417-09538933-0576-4d1d-b3b9-1e877ab01905.gif) + +### Bug Fixes: + +- Fixes bug where interpretation event was not configured correctly by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2993](https://github.com/gradio-app/gradio/pull/2993) +- Fix relative import bug in reload mode by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2992](https://github.com/gradio-app/gradio/pull/2992) +- Fixes bug where png files were not being recognized when uploading images by [@abidlabs](https://github.com/abidlabs) in [PR 3002](https://github.com/gradio-app/gradio/pull/3002) +- Fixes bug where external Spaces could not be loaded and used as functions if they returned files by [@abidlabs](https://github.com/abidlabs) in [PR 3004](https://github.com/gradio-app/gradio/pull/3004) +- Fix bug where file serialization output was not JSON serializable by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2999](https://github.com/gradio-app/gradio/pull/2999) +- Fixes bug where png files were not being recognized when uploading images by [@abidlabs](https://github.com/abidlabs) in [PR 3002](https://github.com/gradio-app/gradio/pull/3002) +- Fixes bug where temporary uploaded files were not being added to temp sets by [@abidlabs](https://github.com/abidlabs) in [PR 3005](https://github.com/gradio-app/gradio/pull/3005) +- Fixes issue where markdown support in chatbot breaks older demos [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3006](https://github.com/gradio-app/gradio/pull/3006) +- Fixes the `/file/` route that was broken in a recent change in [PR 3010](https://github.com/gradio-app/gradio/pull/3010) +- Fix bug where the Image component could not serialize image urls by 
[@freddyaboulton](https://github.com/freddyaboulton) in [PR 2957](https://github.com/gradio-app/gradio/pull/2957) +- Fix forwarding for guides after SEO renaming by [@aliabd](https://github.com/aliabd) in [PR 3017](https://github.com/gradio-app/gradio/pull/3017) +- Switch all pages on the website to use latest stable gradio by [@aliabd](https://github.com/aliabd) in [PR 3016](https://github.com/gradio-app/gradio/pull/3016) +- Fix bug related to deprecated parameters in `huggingface_hub` for the HuggingFaceDatasetSaver in [PR 3025](https://github.com/gradio-app/gradio/pull/3025) +- Added better support for symlinks in the way absolute paths are resolved by [@abidlabs](https://github.com/abidlabs) in [PR 3037](https://github.com/gradio-app/gradio/pull/3037) +- Fix several minor frontend bugs (loading animation, examples as gallery) by [@aliabid94](https://github.com/aliabid94) in [PR 3026](https://github.com/gradio-app/gradio/pull/3026). +- Fixes bug that the chatbot sample code does not work with certain input value by [@petrov826](https://github.com/petrov826) in [PR 3039](https://github.com/gradio-app/gradio/pull/3039). +- Fix shadows for form element and ensure focus styles more visible in dark mode [@pngwn](https://github.com/pngwn) in [PR 3042](https://github.com/gradio-app/gradio/pull/3042). 
+- Fixed bug where the Checkbox and Dropdown change events were not triggered in response to other component changes by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3045](https://github.com/gradio-app/gradio/pull/3045) +- Fix bug where the queue was not properly restarted after launching a `closed` app by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3022](https://github.com/gradio-app/gradio/pull/3022) +- Adding missing embedded components on docs by [@aliabd](https://github.com/aliabd) in [PR 3027](https://github.com/gradio-app/gradio/pull/3027) +- Fixes bug where app would crash if the `file_types` parameter of `gr.File` or `gr.UploadButton` was not a list by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3048](https://github.com/gradio-app/gradio/pull/3048) +- Ensure CSS mounts correctly regardless of how many Gradio instances are on the page [@pngwn](https://github.com/pngwn) in [PR 3059](https://github.com/gradio-app/gradio/pull/3059). +- Fix bug where input component was not hidden in the frontend for `UploadButton` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3053](https://github.com/gradio-app/gradio/pull/3053) +- Fixes issue where after clicking submit or undo, the sketch output wouldn't clear. 
[@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3047](https://github.com/gradio-app/gradio/pull/3047) +- Ensure spaces embedded via the web component always use the correct URLs for server requests and change ports for testing to avoid strange collisions when users are working with embedded apps locally by [@pngwn](https://github.com/pngwn) in [PR 3065](https://github.com/gradio-app/gradio/pull/3065) +- Preserve selected image of Gallery through updated by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3061](https://github.com/gradio-app/gradio/pull/3061) +- Fix bug where auth was not respected on HF spaces by [@freddyaboulton](https://github.com/freddyaboulton) and [@aliabid94](https://github.com/aliabid94) in [PR 3049](https://github.com/gradio-app/gradio/pull/3049) +- Fixes bug where tabs selected attribute not working if manually change tab by [@tomchang25](https://github.com/tomchang25) in [3055](https://github.com/gradio-app/gradio/pull/3055) +- Change chatbot to show dots on progress, and fix bug where chatbot would not stick to bottom in the case of images by [@aliabid94](https://github.com/aliabid94) in [PR 3067](https://github.com/gradio-app/gradio/pull/3079) + +### Documentation Changes: + +- SEO improvements to guides by[@aliabd](https://github.com/aliabd) in [PR 2915](https://github.com/gradio-app/gradio/pull/2915) +- Use `gr.LinePlot` for the `blocks_kinematics` demo by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2998](https://github.com/gradio-app/gradio/pull/2998) +- Updated the `interface_series_load` to include some inline markdown code by [@abidlabs](https://github.com/abidlabs) in [PR 3051](https://github.com/gradio-app/gradio/pull/3051) + +### Testing and Infrastructure Changes: + +- Adds a GitHub action to test if any large files (> 5MB) are present by [@abidlabs](https://github.com/abidlabs) in [PR 3013](https://github.com/gradio-app/gradio/pull/3013) + +### Breaking Changes: + +No changes to highlight. 
+ +### Full Changelog: + +- Rewrote frontend using CSS variables for themes by [@pngwn](https://github.com/pngwn) in [PR 2840](https://github.com/gradio-app/gradio/pull/2840) +- Moved telemetry requests to run on background threads by [@abidlabs](https://github.com/abidlabs) in [PR 3054](https://github.com/gradio-app/gradio/pull/3054) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.16.2 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fixed file upload fails for files with zero size by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2923](https://github.com/gradio-app/gradio/pull/2923) +- Fixed bug where `mount_gradio_app` would not launch if the queue was enabled in a gradio app by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2939](https://github.com/gradio-app/gradio/pull/2939) +- Fix custom long CSS handling in Blocks by [@anton-l](https://github.com/anton-l) in [PR 2953](https://github.com/gradio-app/gradio/pull/2953) +- Recovers the dropdown change event by [@abidlabs](https://github.com/abidlabs) in [PR 2954](https://github.com/gradio-app/gradio/pull/2954). +- Fix audio file output by [@aliabid94](https://github.com/aliabid94) in [PR 2961](https://github.com/gradio-app/gradio/pull/2961). +- Fixed bug where file extensions of really long files were not kept after download by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2929](https://github.com/gradio-app/gradio/pull/2929) +- Fix bug where outputs for examples were not being returned by the backend by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2955](https://github.com/gradio-app/gradio/pull/2955) +- Fix bug in `blocks_plug` demo that prevented switching tabs programmatically with python [@TashaSkyUp](https://github.com/TashaSkyUp) in [PR 2971](https://github.com/gradio-app/gradio/pull/2971). + +### Documentation Changes: + +No changes to highlight. 
+ +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +## 3.16.1 + +### New Features: + +No changes to highlight. + +### Bug Fixes: + +- Fix audio file output by [@aliabid94](https://github.com/aliabid94) in [PR 2950](https://github.com/gradio-app/gradio/pull/2950). + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +No changes to highlight. + +### Contributors Shoutout: + +No changes to highlight. + +## 3.16.0 + +### New Features: + +###### Send custom progress updates by adding a `gr.Progress` argument after the input arguments to any function. Example: + +```python +def reverse(word, progress=gr.Progress()): + progress(0, desc="Starting") + time.sleep(1) + new_string = "" + for letter in progress.tqdm(word, desc="Reversing"): + time.sleep(0.25) + new_string = letter + new_string + return new_string + +demo = gr.Interface(reverse, gr.Text(), gr.Text()) +``` + +Progress indicator bar by [@aliabid94](https://github.com/aliabid94) in [PR 2750](https://github.com/gradio-app/gradio/pull/2750). + +- Added `title` argument to `TabbedInterface` by @MohamedAliRashad in [#2888](https://github.com/gradio-app/gradio/pull/2888) +- Add support for specifying file extensions for `gr.File` and `gr.UploadButton`, using `file_types` parameter (e.g `gr.File(file_count="multiple", file_types=["text", ".json", ".csv"])`) by @dawoodkhan82 in [#2901](https://github.com/gradio-app/gradio/pull/2901) +- Added `multiselect` option to `Dropdown` by @dawoodkhan82 in [#2871](https://github.com/gradio-app/gradio/pull/2871) + +###### With `multiselect` set to `true` a user can now select multiple options from the `gr.Dropdown` component. 
+ +```python +gr.Dropdown(["angola", "pakistan", "canada"], multiselect=True, value=["angola"]) +``` + +Screenshot 2023-01-03 at 4 14 36 PM + +### Bug Fixes: + +- Fixed bug where an error opening an audio file led to a crash by [@FelixDombek](https://github.com/FelixDombek) in [PR 2898](https://github.com/gradio-app/gradio/pull/2898) +- Fixed bug where setting `default_enabled=False` made it so that the entire queue did not start by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2876](https://github.com/gradio-app/gradio/pull/2876) +- Fixed bug where csv preview for DataFrame examples would show filename instead of file contents by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2877](https://github.com/gradio-app/gradio/pull/2877) +- Fixed bug where an error raised after yielding iterative output would not be displayed in the browser by + [@JaySmithWpg](https://github.com/JaySmithWpg) in [PR 2889](https://github.com/gradio-app/gradio/pull/2889) +- Fixed bug in `blocks_style` demo that was preventing it from launching by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2890](https://github.com/gradio-app/gradio/pull/2890) +- Fixed bug where files could not be downloaded by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2926](https://github.com/gradio-app/gradio/pull/2926) +- Fixed bug where cached examples were not displaying properly by [@a-rogalska](https://github.com/a-rogalska) in [PR 2974](https://github.com/gradio-app/gradio/pull/2974) + +### Documentation Changes: + +- Added a Guide on using Google Sheets to create a real-time dashboard with Gradio's `DataFrame` and `LinePlot` component, by [@abidlabs](https://github.com/abidlabs) in [PR 2816](https://github.com/gradio-app/gradio/pull/2816) +- Add a components - events matrix on the docs by [@aliabd](https://github.com/aliabd) in [PR 2921](https://github.com/gradio-app/gradio/pull/2921) + +### Testing and Infrastructure Changes: + +- Deployed PRs from forks to 
spaces by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2895](https://github.com/gradio-app/gradio/pull/2895) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- The `default_enabled` parameter of the `Blocks.queue` method has no effect by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2876](https://github.com/gradio-app/gradio/pull/2876) +- Added typing to several Python files in codebase by [@abidlabs](https://github.com/abidlabs) in [PR 2887](https://github.com/gradio-app/gradio/pull/2887) +- Excluding untracked files from demo notebook check action by [@aliabd](https://github.com/aliabd) in [PR 2897](https://github.com/gradio-app/gradio/pull/2897) +- Optimize images and gifs by [@aliabd](https://github.com/aliabd) in [PR 2922](https://github.com/gradio-app/gradio/pull/2922) +- Updated typing by [@1nF0rmed](https://github.com/1nF0rmed) in [PR 2904](https://github.com/gradio-app/gradio/pull/2904) + +### Contributors Shoutout: + +- @JaySmithWpg for making their first contribution to gradio! +- @MohamedAliRashad for making their first contribution to gradio! + +## 3.15.0 + +### New Features: + +Gradio's newest plotting component `gr.LinePlot`! 📈 + +With this component you can easily create time series visualizations with customizable +appearance for your demos and dashboards ... all without having to know an external plotting library. 
+ +For an example of the api see below: + +```python +gr.LinePlot(stocks, + x="date", + y="price", + color="symbol", + color_legend_position="bottom", + width=600, height=400, title="Stock Prices") +``` + +![image](https://user-images.githubusercontent.com/41651716/208711646-81ae3745-149b-46a3-babd-0569aecdd409.png) + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2807](https://github.com/gradio-app/gradio/pull/2807) + +### Bug Fixes: + +- Fixed bug where the `examples_per_page` parameter of the `Examples` component was not passed to the internal `Dataset` component by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2861](https://github.com/gradio-app/gradio/pull/2861) +- Fixes loading Spaces that have components with default values by [@abidlabs](https://github.com/abidlabs) in [PR 2855](https://github.com/gradio-app/gradio/pull/2855) +- Fixes flagging when `allow_flagging="auto"` in `gr.Interface()` by [@abidlabs](https://github.com/abidlabs) in [PR 2695](https://github.com/gradio-app/gradio/pull/2695) +- Fixed bug where passing a non-list value to `gr.CheckboxGroup` would crash the entire app by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2866](https://github.com/gradio-app/gradio/pull/2866) + +### Documentation Changes: + +- Added a Guide on using BigQuery with Gradio's `DataFrame` and `ScatterPlot` component, + by [@abidlabs](https://github.com/abidlabs) in [PR 2794](https://github.com/gradio-app/gradio/pull/2794) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. 
+ +### Full Changelog: + +- Fixed importing gradio can cause PIL.Image.registered_extensions() to break by `[@aliencaocao](https://github.com/aliencaocao)` in `[PR 2846](https://github.com/gradio-app/gradio/pull/2846)` +- Fix css glitch and navigation in docs by [@aliabd](https://github.com/aliabd) in [PR 2856](https://github.com/gradio-app/gradio/pull/2856) +- Added the ability to set `x_lim`, `y_lim` and legend positions for `gr.ScatterPlot` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2807](https://github.com/gradio-app/gradio/pull/2807) +- Remove footers and min-height the correct way by [@aliabd](https://github.com/aliabd) in [PR 2860](https://github.com/gradio-app/gradio/pull/2860) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.14.0 + +### New Features: + +###### Add Waveform Visual Support to Audio + +Adds a `gr.make_waveform()` function that creates a waveform video by combining an audio and an optional background image by [@dawoodkhan82](http://github.com/dawoodkhan82) and [@aliabid94](http://github.com/aliabid94) in [PR 2706](https://github.com/gradio-app/gradio/pull/2706. Helpful for making audio outputs much more shareable. + +![waveform screenrecording](https://user-images.githubusercontent.com/7870876/206062396-164a5e71-451a-4fe0-94a7-cbe9269d57e6.gif) + +###### Allows Every Component to Accept an `every` Parameter + +When a component's initial value is a function, the `every` parameter re-runs the function every `every` seconds. By [@abidlabs](https://github.com/abidlabs) in [PR 2806](https://github.com/gradio-app/gradio/pull/2806). Here's a code example: + +```py +import gradio as gr + +with gr.Blocks() as demo: + df = gr.DataFrame(run_query, every=60*60) + +demo.queue().launch() +``` + +### Bug Fixes: + +- Fixed issue where too many temporary files were created, all with randomly generated + filepaths. 
Now fewer temporary files are created and are assigned a path that is a
+  hash based on the file contents by [@abidlabs](https://github.com/abidlabs) in [PR 2758](https://github.com/gradio-app/gradio/pull/2758)
+
+### Documentation Changes:
+
+No changes to highlight.
+
+### Testing and Infrastructure Changes:
+
+No changes to highlight.
+
+### Breaking Changes:
+
+No changes to highlight.
+
+### Full Changelog:
+
+No changes to highlight.
+
+### Contributors Shoutout:
+
+No changes to highlight.
+
+## 3.13.2
+
+### New Features:
+
+No changes to highlight.
+
+### Bug Fixes:
+
+No changes to highlight.
+
+### Documentation Changes:
+
+- Improves documentation of several queuing-related parameters by [@abidlabs](https://github.com/abidlabs) in [PR 2825](https://github.com/gradio-app/gradio/pull/2825)
+
+### Testing and Infrastructure Changes:
+
+- Remove h11 pinning by [@ecederstrand](https://github.com/ecederstrand) in [PR 2820](https://github.com/gradio-app/gradio/pull/2820)
+
+### Breaking Changes:
+
+No changes to highlight.
+
+### Full Changelog:
+
+No changes to highlight.
+
+### Contributors Shoutout:
+
+No changes to highlight.
+
+## 3.13.1
+
+### New Features:
+
+###### New Shareable Links
+
+Replaces tunneling logic based on ssh port-forwarding to that based on `frp` by [XciD](https://github.com/XciD) and [Wauplin](https://github.com/Wauplin) in [PR 2509](https://github.com/gradio-app/gradio/pull/2509)
+
+You don't need to do anything differently, but when you set `share=True` in `launch()`,
+you'll get this message and a public link that looks a little bit different:
+
+```bash
+Setting up a public link... we have recently upgraded the way public links are generated. If you encounter any problems, please downgrade to gradio version 3.13.0
+.
+Running on public URL: https://bec81a83-5b5c-471e.gradio.live
+```
+
+These links are a more secure and scalable way to create shareable demos!
+ +### Bug Fixes: + +- Allows `gr.Dataframe()` to take a `pandas.DataFrame` that includes numpy array and other types as its initial value, by [@abidlabs](https://github.com/abidlabs) in [PR 2804](https://github.com/gradio-app/gradio/pull/2804) +- Add `altair` to requirements.txt by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2811](https://github.com/gradio-app/gradio/pull/2811) +- Added aria-labels to icon buttons that are built into UI components by [@emilyuhde](http://github.com/emilyuhde) in [PR 2791](https://github.com/gradio-app/gradio/pull/2791) + +### Documentation Changes: + +- Fixed some typos in the "Plot Component for Maps" guide by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2811](https://github.com/gradio-app/gradio/pull/2811) + +### Testing and Infrastructure Changes: + +- Fixed test for IP address by [@abidlabs](https://github.com/abidlabs) in [PR 2808](https://github.com/gradio-app/gradio/pull/2808) + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Fixed typo in parameter `visible` in classes in `templates.py` by [@abidlabs](https://github.com/abidlabs) in [PR 2805](https://github.com/gradio-app/gradio/pull/2805) +- Switched external service for getting IP address from `https://api.ipify.org` to `https://checkip.amazonaws.com/` by [@abidlabs](https://github.com/abidlabs) in [PR 2810](https://github.com/gradio-app/gradio/pull/2810) + +### Contributors Shoutout: + +No changes to highlight. 
+ +- Fixed typo in parameter `visible` in classes in `templates.py` by [@abidlabs](https://github.com/abidlabs) in [PR 2805](https://github.com/gradio-app/gradio/pull/2805) +- Switched external service for getting IP address from `https://api.ipify.org` to `https://checkip.amazonaws.com/` by [@abidlabs](https://github.com/abidlabs) in [PR 2810](https://github.com/gradio-app/gradio/pull/2810) + +## 3.13.0 + +### New Features: + +###### Scatter plot component + +It is now possible to create a scatter plot natively in Gradio! + +The `gr.ScatterPlot` component accepts a pandas dataframe and some optional configuration parameters +and will automatically create a plot for you! + +This is the first of many native plotting components in Gradio! + +For an example of how to use `gr.ScatterPlot` see below: + +```python +import gradio as gr +from vega_datasets import data + +cars = data.cars() + +with gr.Blocks() as demo: + gr.ScatterPlot(show_label=False, + value=cars, + x="Horsepower", + y="Miles_per_Gallon", + color="Origin", + tooltip="Name", + title="Car Data", + y_title="Miles per Gallon", + color_legend_title="Origin of Car").style(container=False) + +demo.launch() +``` + +image + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2764](https://github.com/gradio-app/gradio/pull/2764) + +###### Support for altair plots + +The `Plot` component can now accept altair plots as values! +Simply return an altair plot from your event listener and gradio will display it in the front-end. 
+See the example below: + +```python +import gradio as gr +import altair as alt +from vega_datasets import data + +cars = data.cars() +chart = ( + alt.Chart(cars) + .mark_point() + .encode( + x="Horsepower", + y="Miles_per_Gallon", + color="Origin", + ) +) + +with gr.Blocks() as demo: + gr.Plot(value=chart) +demo.launch() +``` + +image + +By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2741](https://github.com/gradio-app/gradio/pull/2741) + +###### Set the background color of a Label component + +The `Label` component now accepts a `color` argument by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2736](https://github.com/gradio-app/gradio/pull/2736). +The `color` argument should either be a valid css color name or hexadecimal string. +You can update the color with `gr.Label.update`! + +This lets you create Alert and Warning boxes with the `Label` component. See below: + +```python +import gradio as gr +import random + +def update_color(value): + if value < 0: + # This is bad so use red + return "#FF0000" + elif 0 <= value <= 20: + # Ok but pay attention (use orange) + return "#ff9966" + else: + # Nothing to worry about + return None + +def update_value(): + choice = random.choice(['good', 'bad', 'so-so']) + color = update_color(choice) + return gr.Label.update(value=choice, color=color) + + +with gr.Blocks() as demo: + label = gr.Label(value=-10) + demo.load(lambda: update_value(), inputs=None, outputs=[label], every=1) +demo.queue().launch() +``` + +![label_bg_color_update](https://user-images.githubusercontent.com/41651716/204400372-80e53857-f26f-4a38-a1ae-1acadff75e89.gif) + +###### Add Brazilian Portuguese translation + +Add Brazilian Portuguese translation (pt-BR.json) by [@pstwh](http://github.com/pstwh) in [PR 2753](https://github.com/gradio-app/gradio/pull/2753): + +image + +### Bug Fixes: + +- Fixed issue where image thumbnails were not showing when an example directory was provided + by [@abidlabs](https://github.com/abidlabs) 
in [PR 2745](https://github.com/gradio-app/gradio/pull/2745) +- Fixed bug loading audio input models from the hub by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2779](https://github.com/gradio-app/gradio/pull/2779). +- Fixed issue where entities were not merged when highlighted text was generated from the + dictionary inputs [@payoto](https://github.com/payoto) in [PR 2767](https://github.com/gradio-app/gradio/pull/2767) +- Fixed bug where generating events did not finish running even if the websocket connection was closed by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2783](https://github.com/gradio-app/gradio/pull/2783). + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Images in the chatbot component are now resized if they exceed a max width by [@abidlabs](https://github.com/abidlabs) in [PR 2748](https://github.com/gradio-app/gradio/pull/2748) +- Missing parameters have been added to `gr.Blocks().load()` by [@abidlabs](https://github.com/abidlabs) in [PR 2755](https://github.com/gradio-app/gradio/pull/2755) +- Deindex share URLs from search by [@aliabd](https://github.com/aliabd) in [PR 2772](https://github.com/gradio-app/gradio/pull/2772) +- Redirect old links and fix broken ones by [@aliabd](https://github.com/aliabd) in [PR 2774](https://github.com/gradio-app/gradio/pull/2774) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.12.0 + +### New Features: + +###### The `Chatbot` component now supports a subset of Markdown (including bold, italics, code, images) + +You can now pass in some Markdown to the Chatbot component and it will show up, +meaning that you can pass in images as well! 
by [@abidlabs](https://github.com/abidlabs) in [PR 2731](https://github.com/gradio-app/gradio/pull/2731) + +Here's a simple example that references a local image `lion.jpg` that is in the same +folder as the Python script: + +```py +import gradio as gr + +with gr.Blocks() as demo: + gr.Chatbot([("hi", "hello **abubakar**"), ("![](/file=lion.jpg)", "cool pic")]) + +demo.launch() +``` + +![Alt text](https://user-images.githubusercontent.com/1778297/204357455-5c1a4002-eee7-479d-9a1e-ba2c12522723.png) + +To see a more realistic example, see the new demo `/demo/chatbot_multimodal/run.py`. + +###### Latex support + +Added mathtext (a subset of latex) support to gr.Markdown. Added by [@kashif](https://github.com/kashif) and [@aliabid94](https://github.com/aliabid94) in [PR 2696](https://github.com/gradio-app/gradio/pull/2696). + +Example of how it can be used: + +```python +gr.Markdown( + r""" + # Hello World! $\frac{\sqrt{x + y}}{4}$ is today's lesson. + """) +``` + +###### Update Accordion properties from the backend + +You can now update the Accordion `label` and `open` status with `gr.Accordion.update` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2690](https://github.com/gradio-app/gradio/pull/2690) + +```python +import gradio as gr + +with gr.Blocks() as demo: + with gr.Accordion(label="Open for greeting", open=False) as accordion: + gr.Textbox("Hello!") + open_btn = gr.Button(value="Open Accordion") + close_btn = gr.Button(value="Close Accordion") + open_btn.click( + lambda: gr.Accordion.update(open=True, label="Open Accordion"), + inputs=None, + outputs=[accordion], + ) + close_btn.click( + lambda: gr.Accordion.update(open=False, label="Closed Accordion"), + inputs=None, + outputs=[accordion], + ) +demo.launch() +``` + +![update_accordion](https://user-images.githubusercontent.com/41651716/203164176-b102eae3-babe-4986-ae30-3ab4f400cedc.gif) + +### Bug Fixes: + +- Fixed bug where requests timeout is missing from utils.version_check() by 
[@yujiehecs](https://github.com/yujiehecs) in [PR 2729](https://github.com/gradio-app/gradio/pull/2729) +- Fixed bug where so that the `File` component can properly preprocess files to "binary" byte-string format by [CoffeeVampir3](https://github.com/CoffeeVampir3) in [PR 2727](https://github.com/gradio-app/gradio/pull/2727) +- Fixed bug to ensure that filenames are less than 200 characters even for non-English languages by [@SkyTNT](https://github.com/SkyTNT) in [PR 2685](https://github.com/gradio-app/gradio/pull/2685) + +### Documentation Changes: + +- Performance improvements to docs on mobile by [@aliabd](https://github.com/aliabd) in [PR 2730](https://github.com/gradio-app/gradio/pull/2730) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Make try examples button more prominent by [@aliabd](https://github.com/aliabd) in [PR 2705](https://github.com/gradio-app/gradio/pull/2705) +- Fix id clashes in docs by [@aliabd](https://github.com/aliabd) in [PR 2713](https://github.com/gradio-app/gradio/pull/2713) +- Fix typos in guide docs by [@andridns](https://github.com/andridns) in [PR 2722](https://github.com/gradio-app/gradio/pull/2722) +- Add option to `include_audio` in Video component. When `True`, for `source="webcam"` this will record audio and video, for `source="upload"` this will retain the audio in an uploaded video by [@mandargogate](https://github.com/MandarGogate) in [PR 2721](https://github.com/gradio-app/gradio/pull/2721) + +### Contributors Shoutout: + +- [@andridns](https://github.com/andridns) made their first contribution in [PR 2722](https://github.com/gradio-app/gradio/pull/2722)! + +## 3.11.0 + +### New Features: + +###### Upload Button + +There is now a new component called the `UploadButton` which is a file upload component but in button form! 
You can also specify what file types it should accept in the form of a list (ex: `image`, `video`, `audio`, `text`, or generic `file`). Added by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2591](https://github.com/gradio-app/gradio/pull/2591). + +Example of how it can be used: + +```python +import gradio as gr + +def upload_file(files): + file_paths = [file.name for file in files] + return file_paths + +with gr.Blocks() as demo: + file_output = gr.File() + upload_button = gr.UploadButton("Click to Upload a File", file_types=["image", "video"], file_count="multiple") + upload_button.upload(upload_file, upload_button, file_output) + +demo.launch() +``` + +###### Revamped API documentation page + +New API Docs page with in-browser playground and updated aesthetics. [@gary149](https://github.com/gary149) in [PR 2652](https://github.com/gradio-app/gradio/pull/2652) + +###### Revamped Login page + +Previously our login page had its own CSS, had no dark mode, and had an ugly json message on the wrong credentials. Made the page more aesthetically consistent, added dark mode support, and a nicer error message. [@aliabid94](https://github.com/aliabid94) in [PR 2684](https://github.com/gradio-app/gradio/pull/2684) + +###### Accessing the Requests Object Directly + +You can now access the Request object directly in your Python function by [@abidlabs](https://github.com/abidlabs) in [PR 2641](https://github.com/gradio-app/gradio/pull/2641). This means that you can access request headers, the client IP address, and so on. In order to use it, add a parameter to your function and set its type hint to be `gr.Request`. 
Here's a simple example: + +```py +import gradio as gr + +def echo(name, request: gr.Request): + if request: + print("Request headers dictionary:", request.headers) + print("IP address:", request.client.host) + return name + +io = gr.Interface(echo, "textbox", "textbox").launch() +``` + +### Bug Fixes: + +- Fixed bug that limited files from being sent over websockets to 16MB. The new limit + is now 1GB by [@abidlabs](https://github.com/abidlabs) in [PR 2709](https://github.com/gradio-app/gradio/pull/2709) + +### Documentation Changes: + +- Updated documentation for embedding Gradio demos on Spaces as web components by + [@julien-c](https://github.com/julien-c) in [PR 2698](https://github.com/gradio-app/gradio/pull/2698) +- Updated IFrames in Guides to use the host URL instead of the Space name to be consistent with the new method for embedding Spaces, by + [@julien-c](https://github.com/julien-c) in [PR 2692](https://github.com/gradio-app/gradio/pull/2692) +- Colab buttons on every demo in the website! Just click open in colab, and run the demo there. + +https://user-images.githubusercontent.com/9021060/202878400-cb16ed47-f4dd-4cb0-b2f0-102a9ff64135.mov + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Better warnings and error messages for `gr.Interface.load()` by [@abidlabs](https://github.com/abidlabs) in [PR 2694](https://github.com/gradio-app/gradio/pull/2694) +- Add open in colab buttons to demos in docs and /demos by [@aliabd](https://github.com/aliabd) in [PR 2608](https://github.com/gradio-app/gradio/pull/2608) +- Apply different formatting for the types in component docstrings by [@aliabd](https://github.com/aliabd) in [PR 2707](https://github.com/gradio-app/gradio/pull/2707) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.10.1 + +### New Features: + +No changes to highlight. 
+ +### Bug Fixes: + +- Passes kwargs into `gr.Interface.load()` by [@abidlabs](https://github.com/abidlabs) in [PR 2669](https://github.com/gradio-app/gradio/pull/2669) + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Clean up printed statements in Embedded Colab Mode by [@aliabid94](https://github.com/aliabid94) in [PR 2612](https://github.com/gradio-app/gradio/pull/2612) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.10.0 + +- Add support for `'password'` and `'email'` types to `Textbox`. [@pngwn](https://github.com/pngwn) in [PR 2653](https://github.com/gradio-app/gradio/pull/2653) +- `gr.Textbox` component will now raise an exception if `type` is not "text", "email", or "password" [@pngwn](https://github.com/pngwn) in [PR 2653](https://github.com/gradio-app/gradio/pull/2653). This will cause demos using the deprecated `gr.Textbox(type="number")` to raise an exception. 
+ +### Bug Fixes: + +- Updated the minimum FastApi used in tests to version 0.87 by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2647](https://github.com/gradio-app/gradio/pull/2647) +- Fixed bug where interfaces with examples could not be loaded with `gr.Interface.load` by [@freddyaboulton](https://github.com/freddyaboulton) [PR 2640](https://github.com/gradio-app/gradio/pull/2640) +- Fixed bug where the `interactive` property of a component could not be updated by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2639](https://github.com/gradio-app/gradio/pull/2639) +- Fixed bug where some URLs were not being recognized as valid URLs and thus were not + loading correctly in various components by [@abidlabs](https://github.com/abidlabs) in [PR 2659](https://github.com/gradio-app/gradio/pull/2659) + +### Documentation Changes: + +- Fix some typos in the embedded demo names in "05_using_blocks_like_functions.md" by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2656](https://github.com/gradio-app/gradio/pull/2656) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Add support for `'password'` and `'email'` types to `Textbox`. [@pngwn](https://github.com/pngwn) in [PR 2653](https://github.com/gradio-app/gradio/pull/2653) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.9.1 + +### New Features: + +No changes to highlight. 
+ +### Bug Fixes: + +- Only set a min height on md and html when loading by [@pngwn](https://github.com/pngwn) in [PR 2623](https://github.com/gradio-app/gradio/pull/2623) + +### Documentation Changes: + +- See docs for the latest gradio commit to main as well the latest pip release: + +![main-vs-pip](https://user-images.githubusercontent.com/9021060/199607887-aab1ae4e-a070-4527-966d-024397abe15b.gif) + +- Modified the "Connecting To a Database Guide" to use `pd.read_sql` as opposed to low-level postgres connector by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2604](https://github.com/gradio-app/gradio/pull/2604) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Dropdown for seeing docs as latest or main by [@aliabd](https://github.com/aliabd) in [PR 2544](https://github.com/gradio-app/gradio/pull/2544) +- Allow `gr.Templates` to accept parameters to override the defaults by [@abidlabs](https://github.com/abidlabs) in [PR 2600](https://github.com/gradio-app/gradio/pull/2600) +- Components now throw a `ValueError()` if constructed with invalid parameters for `type` or `source` (for components that take those parameters) in [PR 2610](https://github.com/gradio-app/gradio/pull/2610) +- Allow auth with using queue by [@GLGDLY](https://github.com/GLGDLY) in [PR 2611](https://github.com/gradio-app/gradio/pull/2611) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.9 + +### New Features: + +- Gradio is now embedded directly in colab without requiring the share link by [@aliabid94](https://github.com/aliabid94) in [PR 2455](https://github.com/gradio-app/gradio/pull/2455) + +###### Calling functions by api_name in loaded apps + +When you load an upstream app with `gr.Blocks.load`, you can now specify which fn +to call with the `api_name` parameter. 
+ +```python +import gradio as gr +english_translator = gr.Blocks.load(name="spaces/gradio/english-translator") +german = english_translator("My name is Freddy", api_name='translate-to-german') +``` + +The `api_name` parameter will take precedence over the `fn_index` parameter. + +### Bug Fixes: + +- Fixed bug where None could not be used for File,Model3D, and Audio examples by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2588](https://github.com/gradio-app/gradio/pull/2588) +- Fixed links in Plotly map guide + demo by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2578](https://github.com/gradio-app/gradio/pull/2578) +- `gr.Blocks.load()` now correctly loads example files from Spaces [@abidlabs](https://github.com/abidlabs) in [PR 2594](https://github.com/gradio-app/gradio/pull/2594) +- Fixed bug when image clear started upload dialog [@mezotaken](https://github.com/mezotaken) in [PR 2577](https://github.com/gradio-app/gradio/pull/2577) + +### Documentation Changes: + +- Added a Guide on how to configure the queue for maximum performance by [@abidlabs](https://github.com/abidlabs) in [PR 2558](https://github.com/gradio-app/gradio/pull/2558) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. + +### Full Changelog: + +- Add `api_name` to `Blocks.__call__` by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2593](https://github.com/gradio-app/gradio/pull/2593) +- Update queue with using deque & update requirements by [@GLGDLY](https://github.com/GLGDLY) in [PR 2428](https://github.com/gradio-app/gradio/pull/2428) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.8.2 + +### Bug Fixes: + +- Ensure gradio apps embedded via spaces use the correct endpoint for predictions. [@pngwn](https://github.com/pngwn) in [PR 2567](https://github.com/gradio-app/gradio/pull/2567) +- Ensure gradio apps embedded via spaces use the correct websocket protocol. 
[@pngwn](https://github.com/pngwn) in [PR 2571](https://github.com/gradio-app/gradio/pull/2571) + +### New Features: + +###### Running Events Continuously + +Gradio now supports the ability to run an event continuously on a fixed schedule. To use this feature, +pass `every=# of seconds` to the event definition. This will run the event every given number of seconds! + +This can be used to: + +- Create live visualizations that show the most up to date data +- Refresh the state of the frontend automatically in response to changes in the backend + +Here is an example of a live plot that refreshes every half second: + +```python +import math +import gradio as gr +import plotly.express as px +import numpy as np + + +plot_end = 2 * math.pi + + +def get_plot(period=1): + global plot_end + x = np.arange(plot_end - 2 * math.pi, plot_end, 0.02) + y = np.sin(2*math.pi*period * x) + fig = px.line(x=x, y=y) + plot_end += 2 * math.pi + return fig + + +with gr.Blocks() as demo: + with gr.Row(): + with gr.Column(): + gr.Markdown("Change the value of the slider to automatically update the plot") + period = gr.Slider(label="Period of plot", value=1, minimum=0, maximum=10, step=1) + plot = gr.Plot(label="Plot (updates every half second)") + + dep = demo.load(get_plot, None, plot, every=0.5) + period.change(get_plot, period, plot, every=0.5, cancels=[dep]) + +demo.queue().launch() +``` + +![live_demo](https://user-images.githubusercontent.com/41651716/198357377-633ce460-4e31-47bd-8202-1440cdd6fe19.gif) + +### Bug Fixes: + +No changes to highlight. + +### Documentation Changes: + +- Explained how to set up `queue` and `auth` when working with reload mode by by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3089](https://github.com/gradio-app/gradio/pull/3089) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. 
+

### Full Changelog:

- Allows loading private Spaces by passing an `api_key` to `gr.Interface.load()`
  by [@abidlabs](https://github.com/abidlabs) in [PR 2568](https://github.com/gradio-app/gradio/pull/2568)

### Contributors Shoutout:

No changes to highlight.

## 3.8

### New Features:

- Allows event listeners to accept a single dictionary as its argument, where the keys are the components and the values are the component values. This is set by passing the input components in the event listener as a set instead of a list. [@aliabid94](https://github.com/aliabid94) in [PR 2550](https://github.com/gradio-app/gradio/pull/2550)

### Bug Fixes:

- Fix whitespace issue when using plotly. [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2548](https://github.com/gradio-app/gradio/pull/2548)
- Apply appropriate alt text to all gallery images. [@camenduru](https://github.com/camenduru) in [PR 2538](https://github.com/gradio-app/gradio/pull/2538)
- Removed erroneous tkinter import in gradio.blocks by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2555](https://github.com/gradio-app/gradio/pull/2555)

### Documentation Changes:

No changes to highlight.

### Testing and Infrastructure Changes:

No changes to highlight.

### Breaking Changes:

No changes to highlight.

### Full Changelog:

- Added the `every` keyword to event listeners that runs events on a fixed schedule by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2512](https://github.com/gradio-app/gradio/pull/2512)
- Fix whitespace issue when using plotly. [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2548](https://github.com/gradio-app/gradio/pull/2548)
- Apply appropriate alt text to all gallery images. [@camenduru](https://github.com/camenduru) in [PR 2538](https://github.com/gradio-app/gradio/pull/2538)

### Contributors Shoutout:

No changes to highlight.
+

## 3.7

### New Features:

###### Batched Functions

Gradio now supports the ability to pass _batched_ functions. Batched functions are just
functions which take in a list of inputs and return a list of predictions.

For example, here is a batched function that takes in two lists of inputs (a list of
words and a list of ints), and returns a list of trimmed words as output:

```py
import time

def trim_words(words, lens):
    trimmed_words = []
    time.sleep(5)
    for w, l in zip(words, lens):
        trimmed_words.append(w[:l])
    return [trimmed_words]
```

The advantage of using batched functions is that if you enable queuing, the Gradio
server can automatically _batch_ incoming requests and process them in parallel,
potentially speeding up your demo. Here's what the Gradio code looks like (notice
the `batch=True` and `max_batch_size=16` -- both of these parameters can be passed
into event triggers or into the `Interface` class)

```py
import gradio as gr

with gr.Blocks() as demo:
    with gr.Row():
        word = gr.Textbox(label="word", value="abc")
        leng = gr.Number(label="leng", precision=0, value=1)
        output = gr.Textbox(label="Output")
    with gr.Row():
        run = gr.Button()

    event = run.click(trim_words, [word, leng], output, batch=True, max_batch_size=16)

demo.queue()
demo.launch()
```

In the example above, 16 requests could be processed in parallel (for a total inference
time of 5 seconds), instead of each request being processed separately (for a total
inference time of 80 seconds).

###### Upload Event

`Video`, `Audio`, `Image`, and `File` components now support an `upload()` event that is triggered when a user uploads a file into any of these components.
+ +Example usage: + +```py +import gradio as gr + +with gr.Blocks() as demo: + with gr.Row(): + input_video = gr.Video() + output_video = gr.Video() + + # Clears the output video when an input video is uploaded + input_video.upload(lambda : None, None, output_video) +``` + +### Bug Fixes: + +- Fixes issue where plotly animations, interactivity, titles, legends, were not working properly. [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2486](https://github.com/gradio-app/gradio/pull/2486) +- Prevent requests to the `/api` endpoint from skipping the queue if the queue is enabled for that event by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2493](https://github.com/gradio-app/gradio/pull/2493) +- Fixes a bug with `cancels` in event triggers so that it works properly if multiple + Blocks are rendered by [@abidlabs](https://github.com/abidlabs) in [PR 2530](https://github.com/gradio-app/gradio/pull/2530) +- Prevent invalid targets of events from crashing the whole application. [@pngwn](https://github.com/pngwn) in [PR 2534](https://github.com/gradio-app/gradio/pull/2534) +- Properly dequeue cancelled events when multiple apps are rendered by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2540](https://github.com/gradio-app/gradio/pull/2540) +- Fixes videos being cropped due to height/width params not being used [@hannahblair](https://github.com/hannahblair) in [PR 4946](https://github.com/gradio-app/gradio/pull/4946) + +### Documentation Changes: + +- Added an example interactive dashboard to the "Tabular & Plots" section of the Demos page by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2508](https://github.com/gradio-app/gradio/pull/2508) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. 
+

### Full Changelog:

- Fixes the error message if a user builds Gradio locally and tries to use `share=True` by [@abidlabs](https://github.com/abidlabs) in [PR 2502](https://github.com/gradio-app/gradio/pull/2502)
- Allows the render() function to return self by [@Raul9595](https://github.com/Raul9595) in [PR 2514](https://github.com/gradio-app/gradio/pull/2514)
- Fixes issue where plotly animations, interactivity, titles, legends, were not working properly. [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2486](https://github.com/gradio-app/gradio/pull/2486)
- Gradio now supports batched functions by [@abidlabs](https://github.com/abidlabs) in [PR 2218](https://github.com/gradio-app/gradio/pull/2218)
- Add `upload` event for `Video`, `Audio`, `Image`, and `File` components [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2456](https://github.com/gradio-app/gradio/pull/2456)
- Changes websocket path for Spaces as it is no longer necessary to have a different URL for websocket connections on Spaces by [@abidlabs](https://github.com/abidlabs) in [PR 2528](https://github.com/gradio-app/gradio/pull/2528)
- Clearer error message when events are defined outside of a Blocks scope, and a warning if you
  try to use `Series` or `Parallel` with `Blocks` by [@abidlabs](https://github.com/abidlabs) in [PR 2543](https://github.com/gradio-app/gradio/pull/2543)
- Adds support for audio samples that are in `float64`, `float16`, or `uint16` formats by [@abidlabs](https://github.com/abidlabs) in [PR 2545](https://github.com/gradio-app/gradio/pull/2545)

### Contributors Shoutout:

No changes to highlight.

## 3.6

### New Features:

###### Cancelling Running Events

Running events can be cancelled when other events are triggered! To test this feature, pass the `cancels` parameter to the event listener.
For this feature to work, the queue must be enabled.
+ +![cancel_on_change_rl](https://user-images.githubusercontent.com/41651716/195952623-61a606bd-e82b-4e1a-802e-223154cb8727.gif) + +Code: + +```python +import time +import gradio as gr + +def fake_diffusion(steps): + for i in range(steps): + time.sleep(1) + yield str(i) + +def long_prediction(*args, **kwargs): + time.sleep(10) + return 42 + + +with gr.Blocks() as demo: + with gr.Row(): + with gr.Column(): + n = gr.Slider(1, 10, value=9, step=1, label="Number Steps") + run = gr.Button() + output = gr.Textbox(label="Iterative Output") + stop = gr.Button(value="Stop Iterating") + with gr.Column(): + prediction = gr.Number(label="Expensive Calculation") + run_pred = gr.Button(value="Run Expensive Calculation") + with gr.Column(): + cancel_on_change = gr.Textbox(label="Cancel Iteration and Expensive Calculation on Change") + + click_event = run.click(fake_diffusion, n, output) + stop.click(fn=None, inputs=None, outputs=None, cancels=[click_event]) + pred_event = run_pred.click(fn=long_prediction, inputs=None, outputs=prediction) + + cancel_on_change.change(None, None, None, cancels=[click_event, pred_event]) + + +demo.queue(concurrency_count=1, max_size=20).launch() +``` + +For interfaces, a stop button will be added automatically if the function uses a `yield` statement. + +```python +import gradio as gr +import time + +def iteration(steps): + for i in range(steps): + time.sleep(0.5) + yield i + +gr.Interface(iteration, + inputs=gr.Slider(minimum=1, maximum=10, step=1, value=5), + outputs=gr.Number()).queue().launch() +``` + +![stop_interface_rl](https://user-images.githubusercontent.com/41651716/195952883-e7ca4235-aae3-4852-8f28-96d01d0c5822.gif) + +### Bug Fixes: + +- Add loading status tracker UI to HTML and Markdown components. 
[@pngwn](https://github.com/pngwn) in [PR 2474](https://github.com/gradio-app/gradio/pull/2474)
- Fixed videos being mirrored in the front-end if source is not webcam by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2475](https://github.com/gradio-app/gradio/pull/2475)
- Add clear button for timeseries component [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2487](https://github.com/gradio-app/gradio/pull/2487)
- Removes special characters from temporary filenames so that the files can be served by components [@abidlabs](https://github.com/abidlabs) in [PR 2480](https://github.com/gradio-app/gradio/pull/2480)
- Fixed infinite reload loop when mounting gradio as a sub application by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2477](https://github.com/gradio-app/gradio/pull/2477)

### Documentation Changes:

- Adds a demo to show how a sound alert can be played upon completion of a prediction by [@abidlabs](https://github.com/abidlabs) in [PR 2478](https://github.com/gradio-app/gradio/pull/2478)

### Testing and Infrastructure Changes:

No changes to highlight.

### Breaking Changes:

No changes to highlight.

### Full Changelog:

- Enable running events to be cancelled from other events by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2433](https://github.com/gradio-app/gradio/pull/2433)
- Small fix for version check before reuploading demos by [@aliabd](https://github.com/aliabd) in [PR 2469](https://github.com/gradio-app/gradio/pull/2469)
- Add loading status tracker UI to HTML and Markdown components. [@pngwn](https://github.com/pngwn) in [PR 2474](https://github.com/gradio-app/gradio/pull/2474)
- Add clear button for timeseries component [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2487](https://github.com/gradio-app/gradio/pull/2487)

### Contributors Shoutout:

No changes to highlight.
+

## 3.5

### Bug Fixes:

- Ensure that Gradio does not take control of the HTML page title when embedding a gradio app as a web component, this behaviour can be flipped by adding `control_page_title="true"` to the webcomponent. [@pngwn](https://github.com/pngwn) in [PR 2400](https://github.com/gradio-app/gradio/pull/2400)
- Decreased latency in iterative-output demos by making the iteration asynchronous [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2409](https://github.com/gradio-app/gradio/pull/2409)
- Fixed queue getting stuck under very high load by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2374](https://github.com/gradio-app/gradio/pull/2374)
- Ensure that components always behave as if `interactive=True` were set when the following conditions are true:

  - no default value is provided,
  - they are not set as the input or output of an event,
  - `interactive` kwarg is not set.

  [@pngwn](https://github.com/pngwn) in [PR 2459](https://github.com/gradio-app/gradio/pull/2459)

### New Features:

- When an `Image` component is set to `source="upload"`, it is now possible to drag and drop an image to replace a previously uploaded image by [@pngwn](https://github.com/pngwn) in [PR 1711](https://github.com/gradio-app/gradio/issues/1711)
- The `gr.Dataset` component now accepts `HTML` and `Markdown` components by [@abidlabs](https://github.com/abidlabs) in [PR 2437](https://github.com/gradio-app/gradio/pull/2437)

### Documentation Changes:

- Improved documentation for the `gr.Dataset` component by [@abidlabs](https://github.com/abidlabs) in [PR 2437](https://github.com/gradio-app/gradio/pull/2437)

### Testing and Infrastructure Changes:

No changes to highlight.

### Breaking Changes:

- The `Carousel` component is officially deprecated. Since gradio 3.0, code containing the `Carousel` component would throw warnings. As of the next release, the `Carousel` component will raise an exception.
+

### Full Changelog:

- Speeds up Gallery component by using temporary files instead of base64 representation in the front-end by [@proxyphi](https://github.com/proxyphi), [@pngwn](https://github.com/pngwn), and [@abidlabs](https://github.com/abidlabs) in [PR 2265](https://github.com/gradio-app/gradio/pull/2265)
- Fixed some embedded demos in the guides by not loading the gradio web component in some guides by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2403](https://github.com/gradio-app/gradio/pull/2403)
- When an `Image` component is set to `source="upload"`, it is now possible to drag and drop an image to replace a previously uploaded image by [@pngwn](https://github.com/pngwn) in [PR 2410](https://github.com/gradio-app/gradio/pull/2410)
- Improve documentation of the `Blocks.load()` event by [@abidlabs](https://github.com/abidlabs) in [PR 2413](https://github.com/gradio-app/gradio/pull/2413)
- Decreased latency in iterative-output demos by making the iteration asynchronous [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2409](https://github.com/gradio-app/gradio/pull/2409)
- Updated share link message to reference new Spaces Hardware [@abidlabs](https://github.com/abidlabs) in [PR 2423](https://github.com/gradio-app/gradio/pull/2423)
- Automatically restart spaces if they're down by [@aliabd](https://github.com/aliabd) in [PR 2405](https://github.com/gradio-app/gradio/pull/2405)
- Carousel component is now deprecated by [@abidlabs](https://github.com/abidlabs) in [PR 2434](https://github.com/gradio-app/gradio/pull/2434)
- Build Gradio from source in ui tests by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2440](https://github.com/gradio-app/gradio/pull/2440)
- Change "return ValueError" to "raise ValueError" by [@vzakharov](https://github.com/vzakharov) in [PR 2445](https://github.com/gradio-app/gradio/pull/2445)
- Add guide on creating a map demo using the `gr.Plot()` component 
[@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2402](https://github.com/gradio-app/gradio/pull/2402) +- Add blur event for `Textbox` and `Number` components [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2448](https://github.com/gradio-app/gradio/pull/2448) +- Stops a gradio launch from hogging a port even after it's been killed [@aliabid94](https://github.com/aliabid94) in [PR 2453](https://github.com/gradio-app/gradio/pull/2453) +- Fix embedded interfaces on touch screen devices by [@aliabd](https://github.com/aliabd) in [PR 2457](https://github.com/gradio-app/gradio/pull/2457) +- Upload all demos to spaces by [@aliabd](https://github.com/aliabd) in [PR 2281](https://github.com/gradio-app/gradio/pull/2281) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.4.1 + +### New Features: + +###### 1. See Past and Upcoming Changes in the Release History 👀 + +You can now see gradio's release history directly on the website, and also keep track of upcoming changes. Just go [here](https://gradio.app/changelog/). + +![release-history](https://user-images.githubusercontent.com/9021060/193145458-3de699f7-7620-45de-aa73-a1c1b9b96257.gif) + +### Bug Fixes: + +1. Fix typo in guide image path by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2357](https://github.com/gradio-app/gradio/pull/2357) +2. Raise error if Blocks has duplicate component with same IDs by [@abidlabs](https://github.com/abidlabs) in [PR 2359](https://github.com/gradio-app/gradio/pull/2359) +3. Catch the permission exception on the audio component by [@Ian-GL](https://github.com/Ian-GL) in [PR 2330](https://github.com/gradio-app/gradio/pull/2330) +4. Fix image_classifier_interface_load demo by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2365](https://github.com/gradio-app/gradio/pull/2365) +5. 
Fix combining adjacent components without gaps by introducing `gr.Row(variant="compact")` by [@aliabid94](https://github.com/aliabid94) in [PR 2291](https://github.com/gradio-app/gradio/pull/2291) This comes with deprecation of the following arguments for `Component.style`: `round`, `margin`, `border`. +6. Fix audio streaming, which was previously choppy in [PR 2351](https://github.com/gradio-app/gradio/pull/2351). Big thanks to [@yannickfunk](https://github.com/yannickfunk) for the proposed solution. +7. Fix bug where new typeable slider doesn't respect the minimum and maximum values [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2380](https://github.com/gradio-app/gradio/pull/2380) + +### Documentation Changes: + +1. New Guide: Connecting to a Database 🗄️ + + A new guide by [@freddyaboulton](https://github.com/freddyaboulton) that explains how you can use Gradio to connect your app to a database. Read more [here](https://gradio.app/connecting_to_a_database/). + +2. New Guide: Running Background Tasks 🥷 + + A new guide by [@freddyaboulton](https://github.com/freddyaboulton) that explains how you can run background tasks from your gradio app. Read more [here](https://gradio.app/running_background_tasks/). + +3. Small fixes to docs for `Image` component by [@abidlabs](https://github.com/abidlabs) in [PR 2372](https://github.com/gradio-app/gradio/pull/2372) + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. 
+ +### Full Changelog: + +- Create a guide on how to connect an app to a database hosted on the cloud by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2341](https://github.com/gradio-app/gradio/pull/2341) +- Removes `analytics` dependency by [@abidlabs](https://github.com/abidlabs) in [PR 2347](https://github.com/gradio-app/gradio/pull/2347) +- Add guide on launching background tasks from your app by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2350](https://github.com/gradio-app/gradio/pull/2350) +- Fix typo in guide image path by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2357](https://github.com/gradio-app/gradio/pull/2357) +- Raise error if Blocks has duplicate component with same IDs by [@abidlabs](https://github.com/abidlabs) in [PR 2359](https://github.com/gradio-app/gradio/pull/2359) +- Hotfix: fix version back to 3.4 by [@abidlabs](https://github.com/abidlabs) in [PR 2361](https://github.com/gradio-app/gradio/pull/2361) +- Change version.txt to 3.4 instead of 3.4.0 by [@aliabd](https://github.com/aliabd) in [PR 2363](https://github.com/gradio-app/gradio/pull/2363) +- Catch the permission exception on the audio component by [@Ian-GL](https://github.com/Ian-GL) in [PR 2330](https://github.com/gradio-app/gradio/pull/2330) +- Fix image_classifier_interface_load demo by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2365](https://github.com/gradio-app/gradio/pull/2365) +- Small fixes to docs for `Image` component by [@abidlabs](https://github.com/abidlabs) in [PR 2372](https://github.com/gradio-app/gradio/pull/2372) +- Automated Release Notes by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2306](https://github.com/gradio-app/gradio/pull/2306) +- Fixed small typos in the docs [@julien-c](https://github.com/julien-c) in [PR 2373](https://github.com/gradio-app/gradio/pull/2373) +- Adds ability to disable pre/post-processing for examples [@abidlabs](https://github.com/abidlabs) in [PR 
2383](https://github.com/gradio-app/gradio/pull/2383) +- Copy changelog file in website docker by [@aliabd](https://github.com/aliabd) in [PR 2384](https://github.com/gradio-app/gradio/pull/2384) +- Lets users provide a `gr.update()` dictionary even if post-processing is disabled [@abidlabs](https://github.com/abidlabs) in [PR 2385](https://github.com/gradio-app/gradio/pull/2385) +- Fix bug where errors would cause apps run in reload mode to hang forever by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2394](https://github.com/gradio-app/gradio/pull/2394) +- Fix bug where new typeable slider doesn't respect the minimum and maximum values [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2380](https://github.com/gradio-app/gradio/pull/2380) + +### Contributors Shoutout: + +No changes to highlight. + +## 3.4 + +### New Features: + +###### 1. Gallery Captions 🖼️ + +You can now pass captions to images in the Gallery component. To do so you need to pass a {List} of (image, {str} caption) tuples. This is optional and the component also accepts just a list of the images. + +Here's an example: + +```python +import gradio as gr + +images_with_captions = [ + ("https://images.unsplash.com/photo-1551969014-7d2c4cddf0b6", "Cheetah by David Groves"), + ("https://images.unsplash.com/photo-1546182990-dffeafbe841d", "Lion by Francesco"), + ("https://images.unsplash.com/photo-1561731216-c3a4d99437d5", "Tiger by Mike Marrah") + ] + +with gr.Blocks() as demo: + gr.Gallery(value=images_with_captions) + +demo.launch() +``` + +gallery_captions + +###### 2. Type Values into the Slider 🔢 + +You can now type values directly on the Slider component! Here's what it looks like: + +![type-slider](https://user-images.githubusercontent.com/9021060/192399877-76b662a1-fede-4417-a932-fc15f0da7360.gif) + +###### 3. Better Sketching and Inpainting 🎨 + +We've made a lot of changes to our Image component so that it can support better sketching and inpainting. 
+ +Now supports: + +- A standalone black-and-white sketch + +```python +import gradio as gr +demo = gr.Interface(lambda x: x, gr.Sketchpad(), gr.Image()) +demo.launch() +``` + +![bw](https://user-images.githubusercontent.com/9021060/192410264-b08632b5-7b2a-4f86-afb0-5760e7b474cf.gif) + +- A standalone color sketch + +```python +import gradio as gr +demo = gr.Interface(lambda x: x, gr.Paint(), gr.Image()) +demo.launch() +``` + +![color-sketch](https://user-images.githubusercontent.com/9021060/192410500-3c8c3e64-a5fd-4df2-a991-f0a5cef93728.gif) + +- An uploadable image with black-and-white or color sketching + +```python +import gradio as gr +demo = gr.Interface(lambda x: x, gr.Image(source='upload', tool='color-sketch'), gr.Image()) # for black and white, tool = 'sketch' +demo.launch() +``` + +![sketch-new](https://user-images.githubusercontent.com/9021060/192402422-e53cb7b6-024e-448c-87eb-d6a35a63c476.gif) + +- Webcam with black-and-white or color sketching + +```python +import gradio as gr +demo = gr.Interface(lambda x: x, gr.Image(source='webcam', tool='color-sketch'), gr.Image()) # for black and white, tool = 'sketch' +demo.launch() +``` + +![webcam-sketch](https://user-images.githubusercontent.com/9021060/192410820-0ffaf324-776e-4e1f-9de6-0fdbbf4940fa.gif) + +As well as other fixes + +### Bug Fixes: + +1. Fix bug where max concurrency count is not respected in queue by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2286](https://github.com/gradio-app/gradio/pull/2286) +2. fix : queue could be blocked by [@SkyTNT](https://github.com/SkyTNT) in [PR 2288](https://github.com/gradio-app/gradio/pull/2288) +3. Supports `gr.update()` in example caching by [@abidlabs](https://github.com/abidlabs) in [PR 2309](https://github.com/gradio-app/gradio/pull/2309) +4. Clipboard fix for iframes by [@abidlabs](https://github.com/abidlabs) in [PR 2321](https://github.com/gradio-app/gradio/pull/2321) +5. 
Fix: Dataframe column headers are reset when you add a new column by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2318](https://github.com/gradio-app/gradio/pull/2318) +6. Added support for URLs for Video, Audio, and Image by [@abidlabs](https://github.com/abidlabs) in [PR 2256](https://github.com/gradio-app/gradio/pull/2256) +7. Add documentation about how to create and use the Gradio FastAPI app by [@abidlabs](https://github.com/abidlabs) in [PR 2263](https://github.com/gradio-app/gradio/pull/2263) + +### Documentation Changes: + +1. Adding a Playground Tab to the Website by [@aliabd](https://github.com/aliabd) in [PR 1860](https://github.com/gradio-app/gradio/pull/1860) +2. Gradio for Tabular Data Science Workflows Guide by [@merveenoyan](https://github.com/merveenoyan) in [PR 2199](https://github.com/gradio-app/gradio/pull/2199) +3. Promotes `postprocess` and `preprocess` to documented parameters by [@abidlabs](https://github.com/abidlabs) in [PR 2293](https://github.com/gradio-app/gradio/pull/2293) +4. Update 2)key_features.md by [@voidxd](https://github.com/voidxd) in [PR 2326](https://github.com/gradio-app/gradio/pull/2326) +5. Add docs to blocks context postprocessing function by [@Ian-GL](https://github.com/Ian-GL) in [PR 2332](https://github.com/gradio-app/gradio/pull/2332) + +### Testing and Infrastructure Changes + +1. Website fixes and refactoring by [@aliabd](https://github.com/aliabd) in [PR 2280](https://github.com/gradio-app/gradio/pull/2280) +2. 
Don't deploy to spaces on release by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2313](https://github.com/gradio-app/gradio/pull/2313) + +### Full Changelog: + +- Website fixes and refactoring by [@aliabd](https://github.com/aliabd) in [PR 2280](https://github.com/gradio-app/gradio/pull/2280) +- Fix bug where max concurrency count is not respected in queue by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2286](https://github.com/gradio-app/gradio/pull/2286) +- Promotes `postprocess` and `preprocess` to documented parameters by [@abidlabs](https://github.com/abidlabs) in [PR 2293](https://github.com/gradio-app/gradio/pull/2293) +- Raise warning when trying to cache examples but not all inputs have examples by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2279](https://github.com/gradio-app/gradio/pull/2279) +- fix : queue could be blocked by [@SkyTNT](https://github.com/SkyTNT) in [PR 2288](https://github.com/gradio-app/gradio/pull/2288) +- Don't deploy to spaces on release by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2313](https://github.com/gradio-app/gradio/pull/2313) +- Supports `gr.update()` in example caching by [@abidlabs](https://github.com/abidlabs) in [PR 2309](https://github.com/gradio-app/gradio/pull/2309) +- Respect Upstream Queue when loading interfaces/blocks from Spaces by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2294](https://github.com/gradio-app/gradio/pull/2294) +- Clipboard fix for iframes by [@abidlabs](https://github.com/abidlabs) in [PR 2321](https://github.com/gradio-app/gradio/pull/2321) +- Sketching + Inpainting Capabilities to Gradio by [@abidlabs](https://github.com/abidlabs) in [PR 2144](https://github.com/gradio-app/gradio/pull/2144) +- Update 2)key_features.md by [@voidxd](https://github.com/voidxd) in [PR 2326](https://github.com/gradio-app/gradio/pull/2326) +- release 3.4b3 by [@abidlabs](https://github.com/abidlabs) in [PR 
2328](https://github.com/gradio-app/gradio/pull/2328) +- Fix: Dataframe column headers are reset when you add a new column by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2318](https://github.com/gradio-app/gradio/pull/2318) +- Start queue when gradio is a sub application by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2319](https://github.com/gradio-app/gradio/pull/2319) +- Fix Web Tracker Script by [@aliabd](https://github.com/aliabd) in [PR 2308](https://github.com/gradio-app/gradio/pull/2308) +- Add docs to blocks context postprocessing function by [@Ian-GL](https://github.com/Ian-GL) in [PR 2332](https://github.com/gradio-app/gradio/pull/2332) +- Fix typo in iterator variable name in run_predict function by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2340](https://github.com/gradio-app/gradio/pull/2340) +- Add captions to galleries by [@aliabid94](https://github.com/aliabid94) in [PR 2284](https://github.com/gradio-app/gradio/pull/2284) +- Typeable value on gradio.Slider by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2329](https://github.com/gradio-app/gradio/pull/2329) + +### Contributors Shoutout: + +- [@SkyTNT](https://github.com/SkyTNT) made their first contribution in [PR 2288](https://github.com/gradio-app/gradio/pull/2288) +- [@voidxd](https://github.com/voidxd) made their first contribution in [PR 2326](https://github.com/gradio-app/gradio/pull/2326) + +## 3.3 + +### New Features: + +###### 1. Iterative Outputs ⏳ + +You can now create an iterative output simply by having your function return a generator! + +Here's (part of) an example that was used to generate the interface below it. [See full code](https://colab.research.google.com/drive/1m9bWS6B82CT7bw-m4L6AJR8za7fEK7Ov?usp=sharing). 
+ +```python +def predict(steps, seed): + generator = torch.manual_seed(seed) + for i in range(1,steps): + yield pipeline(generator=generator, num_inference_steps=i)["sample"][0] +``` + +![example](https://user-images.githubusercontent.com/9021060/189086273-f5e7087d-71fa-4158-90a9-08e84da0421c.mp4) + +###### 2. Accordion Layout 🆕 + +This version of Gradio introduces a new layout component to Blocks: the Accordion. Wrap your elements in a neat, expandable layout that allows users to toggle them as needed. + +Usage: ([Read the docs](https://gradio.app/docs/#accordion)) + +```python +with gr.Accordion("open up"): +# components here +``` + +![accordion](https://user-images.githubusercontent.com/9021060/189088465-f0ffd7f0-fc6a-42dc-9249-11c5e1e0529b.gif) + +###### 3. Skops Integration 📈 + +Our new integration with [skops](https://huggingface.co/blog/skops) allows you to load tabular classification and regression models directly from the [hub](https://huggingface.co/models). + +Here's a classification example showing how quick it is to set up an interface for a [model](https://huggingface.co/scikit-learn/tabular-playground). + +```python +import gradio as gr +gr.Interface.load("models/scikit-learn/tabular-playground").launch() +``` + +![187936493-5c90c01d-a6dd-400f-aa42-833a096156a1](https://user-images.githubusercontent.com/9021060/189090519-328fbcb4-120b-43c8-aa54-d6fccfa6b7e8.png) + +### Bug Fixes: + +No changes to highlight. + +### Documentation Changes: + +No changes to highlight. + +### Testing and Infrastructure Changes: + +No changes to highlight. + +### Breaking Changes: + +No changes to highlight. 
+ +### Full Changelog: + +- safari fixes by [@pngwn](https://github.com/pngwn) in [PR 2138](https://github.com/gradio-app/gradio/pull/2138) +- Fix roundedness and form borders by [@aliabid94](https://github.com/aliabid94) in [PR 2147](https://github.com/gradio-app/gradio/pull/2147) +- Better processing of example data prior to creating dataset component by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2147](https://github.com/gradio-app/gradio/pull/2147) +- Show error on Connection drops by [@aliabid94](https://github.com/aliabid94) in [PR 2147](https://github.com/gradio-app/gradio/pull/2147) +- 3.2 release! by [@abidlabs](https://github.com/abidlabs) in [PR 2139](https://github.com/gradio-app/gradio/pull/2139) +- Fixed Named API Requests by [@abidlabs](https://github.com/abidlabs) in [PR 2151](https://github.com/gradio-app/gradio/pull/2151) +- Quick Fix: Cannot upload Model3D image after clearing it by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2168](https://github.com/gradio-app/gradio/pull/2168) +- Fixed misleading log when server_name is '0.0.0.0' by [@lamhoangtung](https://github.com/lamhoangtung) in [PR 2176](https://github.com/gradio-app/gradio/pull/2176) +- Keep embedded PngInfo metadata by [@cobryan05](https://github.com/cobryan05) in [PR 2170](https://github.com/gradio-app/gradio/pull/2170) +- Skops integration: Load tabular classification and regression models from the hub by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2126](https://github.com/gradio-app/gradio/pull/2126) +- Respect original filename when cached example files are downloaded by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2145](https://github.com/gradio-app/gradio/pull/2145) +- Add manual trigger to deploy to pypi by [@abidlabs](https://github.com/abidlabs) in [PR 2192](https://github.com/gradio-app/gradio/pull/2192) +- Fix bugs with gr.update by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 
2157](https://github.com/gradio-app/gradio/pull/2157) +- Make queue per app by [@aliabid94](https://github.com/aliabid94) in [PR 2193](https://github.com/gradio-app/gradio/pull/2193) +- Preserve Labels In Interpretation Components by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2166](https://github.com/gradio-app/gradio/pull/2166) +- Quick Fix: Multiple file download not working by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2169](https://github.com/gradio-app/gradio/pull/2169) +- use correct MIME type for js-script file by [@daspartho](https://github.com/daspartho) in [PR 2200](https://github.com/gradio-app/gradio/pull/2200) +- Add accordion component by [@aliabid94](https://github.com/aliabid94) in [PR 2208](https://github.com/gradio-app/gradio/pull/2208) + +### Contributors Shoutout: + +- [@lamhoangtung](https://github.com/lamhoangtung) made their first contribution in [PR 2176](https://github.com/gradio-app/gradio/pull/2176) +- [@cobryan05](https://github.com/cobryan05) made their first contribution in [PR 2170](https://github.com/gradio-app/gradio/pull/2170) +- [@daspartho](https://github.com/daspartho) made their first contribution in [PR 2200](https://github.com/gradio-app/gradio/pull/2200) + +## 3.2 + +### New Features: + +###### 1. Improvements to Queuing 🥇 + +We've implemented a brand new queuing system based on **web sockets** instead of HTTP long polling. Among other things, this allows us to manage queue sizes better on Hugging Face Spaces. There are also additional queue-related parameters you can add: + +- Now supports concurrent workers (parallelization) + +```python +demo = gr.Interface(...) +demo.queue(concurrency_count=3) +demo.launch() +``` + +- Configure a maximum queue size + +```python +demo = gr.Interface(...) +demo.queue(max_size=100) +demo.launch() +``` + +- If a user closes their tab / browser, they leave the queue, which means the demo will run faster for everyone else + +###### 2. 
Fixes to Examples + +- Dataframe examples will render properly, and look much clearer in the UI: (thanks to PR #2125) + +![Screen Shot 2022-08-30 at 8 29 58 PM](https://user-images.githubusercontent.com/9021060/187586561-d915bafb-f968-4966-b9a2-ef41119692b2.png) + +- Image and Video thumbnails are cropped to look neater and more uniform: (thanks to PR #2109) + +![Screen Shot 2022-08-30 at 8 32 15 PM](https://user-images.githubusercontent.com/9021060/187586890-56e1e4f0-1b84-42d9-a82f-911772c41030.png) + +- Other fixes in PR #2131 and #2064 make it easier to design and use Examples + +###### 3. Component Fixes 🧱 + +- Specify the width and height of an image in its style tag (thanks to PR #2133) + +```python +components.Image().style(height=260, width=300) +``` + +- Automatic conversion of videos so they are playable in the browser (thanks to PR #2003). Gradio will check if a video's format is playable in the browser and, if it isn't, will automatically convert it to a format that is (mp4). +- Pass in a json filepath to the Label component (thanks to PR #2083) +- Randomize the default value of a Slider (thanks to PR #1935) + +![slider-random](https://user-images.githubusercontent.com/9021060/187596230-3db9697f-9f4d-42f5-9387-d77573513448.gif) + +- Improvements to State in PR #2100 + +###### 4. Ability to Randomize Input Sliders and Reload Data whenever the Page Loads + +- In some cases, you want to be able to show a different set of input data to every user as they load the page app. For example, you might want to randomize the value of a "seed" `Slider` input. Or you might want to show a `Textbox` with the current date. We now support passing _functions_ as the default value in input components. When you pass in a function, it gets **re-evaluated** every time someone loads the demo, allowing you to reload / change data for different users. 
+ +Here's an example loading the current date time into an input Textbox: + +```python +import gradio as gr +import datetime + +with gr.Blocks() as demo: + gr.Textbox(datetime.datetime.now) + +demo.launch() +``` + +Note that we don't evaluate the function -- `datetime.datetime.now()` -- we pass in the function itself to get this behavior -- `datetime.datetime.now` + +Because randomizing the initial value of `Slider` is a common use case, we've added a `randomize` keyword argument you can use to randomize its initial value: + +```python +import gradio as gr +demo = gr.Interface(lambda x:x, gr.Slider(0, 10, randomize=True), "number") +demo.launch() +``` + +###### 5. New Guide 🖊️ + +- [Gradio and W&B Integration](https://gradio.app/Gradio_and_Wandb_Integration/) + +### Full Changelog: + +- Reset components to original state by setting value to None by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2044](https://github.com/gradio-app/gradio/pull/2044) +- Cleaning up the way data is processed for components by [@abidlabs](https://github.com/abidlabs) in [PR 1967](https://github.com/gradio-app/gradio/pull/1967) +- version 3.1.8b by [@abidlabs](https://github.com/abidlabs) in [PR 2063](https://github.com/gradio-app/gradio/pull/2063) +- Wandb guide by [@AK391](https://github.com/AK391) in [PR 1898](https://github.com/gradio-app/gradio/pull/1898) +- Add a flagging callback to save json files to a hugging face dataset by [@chrisemezue](https://github.com/chrisemezue) in [PR 1821](https://github.com/gradio-app/gradio/pull/1821) +- Add data science demos to landing page by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2067](https://github.com/gradio-app/gradio/pull/2067) +- Hide time series + xgboost demos by default by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2079](https://github.com/gradio-app/gradio/pull/2079) +- Encourage people to keep trying when queue full by [@apolinario](https://github.com/apolinario) in [PR 
2076](https://github.com/gradio-app/gradio/pull/2076) +- Updated our analytics on creation of Blocks/Interface by [@abidlabs](https://github.com/abidlabs) in [PR 2082](https://github.com/gradio-app/gradio/pull/2082) +- `Label` component now accepts file paths to `.json` files by [@abidlabs](https://github.com/abidlabs) in [PR 2083](https://github.com/gradio-app/gradio/pull/2083) +- Fix issues related to demos in Spaces by [@abidlabs](https://github.com/abidlabs) in [PR 2086](https://github.com/gradio-app/gradio/pull/2086) +- Fix TimeSeries examples not properly displayed in UI by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2064](https://github.com/gradio-app/gradio/pull/2064) +- Fix infinite requests when doing tab item select by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2070](https://github.com/gradio-app/gradio/pull/2070) +- Accept deprecated `file` route as well by [@abidlabs](https://github.com/abidlabs) in [PR 2099](https://github.com/gradio-app/gradio/pull/2099) +- Allow frontend method execution on Block.load event by [@codedealer](https://github.com/codedealer) in [PR 2108](https://github.com/gradio-app/gradio/pull/2108) +- Improvements to `State` by [@abidlabs](https://github.com/abidlabs) in [PR 2100](https://github.com/gradio-app/gradio/pull/2100) +- Catch IndexError, KeyError in video_is_playable by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 2113](https://github.com/gradio-app/gradio/pull/2113) +- Fix: Download button does not respect the filepath returned by the function by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 2073](https://github.com/gradio-app/gradio/pull/2073) +- Refactoring Layout: Adding column widths, forms, and more. 
by [@aliabid94](https://github.com/aliabid94) in [PR 2097](https://github.com/gradio-app/gradio/pull/2097) +- Update CONTRIBUTING.md by [@abidlabs](https://github.com/abidlabs) in [PR 2118](https://github.com/gradio-app/gradio/pull/2118) +- 2092 df ex by [@pngwn](https://github.com/pngwn) in [PR 2125](https://github.com/gradio-app/gradio/pull/2125) +- feat(samples table/gallery): Crop thumbs to square by [@ronvoluted](https://github.com/ronvoluted) in [PR 2109](https://github.com/gradio-app/gradio/pull/2109) +- Some enhancements to `gr.Examples` by [@abidlabs](https://github.com/abidlabs) in [PR 2131](https://github.com/gradio-app/gradio/pull/2131) +- Image size fix by [@aliabid94](https://github.com/aliabid94) in [PR 2133](https://github.com/gradio-app/gradio/pull/2133) + +### Contributors Shoutout: + +- [@chrisemezue](https://github.com/chrisemezue) made their first contribution in [PR 1821](https://github.com/gradio-app/gradio/pull/1821) +- [@apolinario](https://github.com/apolinario) made their first contribution in [PR 2076](https://github.com/gradio-app/gradio/pull/2076) +- [@codedealer](https://github.com/codedealer) made their first contribution in [PR 2108](https://github.com/gradio-app/gradio/pull/2108) + +## 3.1 + +### New Features: + +###### 1. Embedding Demos on Any Website 💻 + +With PR #1444, Gradio is now distributed as a web component. This means demos can be natively embedded on websites. You'll just need to add two lines: one to load the gradio javascript, and one to link to the demos backend. + +Here's a simple example that embeds the demo from a Hugging Face space: + +```html +<script type="module" src="https://gradio.s3-us-west-2.amazonaws.com/3.0.18/gradio.js"></script> +<gradio-app space="abidlabs/pictionary"></gradio-app> +``` + +But you can also embed demos that are running anywhere, you just need to link the demo to `src` instead of `space`. In fact, all the demos on the gradio website are embedded this way: + +Screen Shot 2022-07-14 at 2 41 44 PM + +Read more in the [Embedding Gradio Demos](https://gradio.app/embedding_gradio_demos) guide. + +###### 2. 
Reload Mode 👨‍💻 + +Reload mode helps developers create gradio demos faster by automatically reloading the demo whenever the code changes. It can support development on Python IDEs (VS Code, PyCharm, etc), the terminal, as well as Jupyter notebooks. + +If your demo code is in a script named `app.py`, instead of running `python app.py` you can now run `gradio app.py` and that will launch the demo in reload mode: + +```bash +Launching in reload mode on: http://127.0.0.1:7860 (Press CTRL+C to quit) +Watching... +WARNING: The --reload flag should not be used in production on Windows. +``` + +If you're working from a Jupyter or Colab Notebook, use these magic commands instead: `%load_ext gradio` when you import gradio, and `%%blocks` in the top of the cell with the demo code. Here's an example that shows how much faster the development becomes: + +![Blocks](https://user-images.githubusercontent.com/9021060/178986488-ed378cc8-5141-4330-ba41-672b676863d0.gif) + +###### 3. Inpainting Support on `gr.Image()` 🎨 + +We updated the Image component to add support for inpainting demos. It works by adding `tool="sketch"` as a parameter, that passes both an image and a sketchable mask to your prediction function. + +Here's an example from the [LAMA space](https://huggingface.co/spaces/akhaliq/lama): + +![FXApVlFVsAALSD-](https://user-images.githubusercontent.com/9021060/178989479-549867c8-7fb0-436a-a97d-1e91c9f5e611.jpeg) + +###### 4. Markdown and HTML support in Dataframes 🔢 + +We upgraded the Dataframe component in PR #1684 to support rendering Markdown and HTML inside the cells. + +This means you can build Dataframes that look like the following: + +![image (8)](https://user-images.githubusercontent.com/9021060/178991233-41cb07a5-e7a3-433e-89b8-319bc78eb9c2.png) + +###### 5. `gr.Examples()` for Blocks 🧱 + +We've added the `gr.Examples` component helper to allow you to add examples to any Blocks demo. This class is a wrapper over the `gr.Dataset` component. 
+ +Screen Shot 2022-07-14 at 2 23 50 PM + +gr.Examples takes two required parameters: + +- `examples` which takes in a nested list +- `inputs` which takes in a component or list of components + +You can read more in the [Examples docs](https://gradio.app/docs/#examples) or the [Adding Examples to your Demos guide](https://gradio.app/adding_examples_to_your_app/). + +###### 6. Fixes to Audio Streaming + +With [PR 1828](https://github.com/gradio-app/gradio/pull/1828) we now hide the status loading animation, as well as remove the echo in streaming. Check out the [stream_audio](https://github.com/gradio-app/gradio/blob/main/demo/stream_audio/run.py) demo for more or read through our [Real Time Speech Recognition](https://gradio.app/real_time_speech_recognition/) guide. + +Screen Shot 2022-07-19 at 6 02 35 PM + +### Full Changelog: + +- File component: list multiple files and allow for download #1446 by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 1681](https://github.com/gradio-app/gradio/pull/1681) +- Add ColorPicker to docs by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 1768](https://github.com/gradio-app/gradio/pull/1768) +- Mock out requests in TestRequest unit tests by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 1794](https://github.com/gradio-app/gradio/pull/1794) +- Add requirements.txt and test_files to source dist by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 1817](https://github.com/gradio-app/gradio/pull/1817) +- refactor: f-string for tunneling.py by [@nhankiet](https://github.com/nhankiet) in [PR 1819](https://github.com/gradio-app/gradio/pull/1819) +- Miscellaneous formatting improvements to website by [@aliabd](https://github.com/aliabd) in [PR 1754](https://github.com/gradio-app/gradio/pull/1754) +- `integrate()` method moved to `Blocks` by [@abidlabs](https://github.com/abidlabs) in [PR 1776](https://github.com/gradio-app/gradio/pull/1776) +- Add python-3.7 tests by 
[@freddyaboulton](https://github.com/freddyaboulton) in [PR 1818](https://github.com/gradio-app/gradio/pull/1818) +- Copy test dir in website dockers by [@aliabd](https://github.com/aliabd) in [PR 1827](https://github.com/gradio-app/gradio/pull/1827) +- Add info to docs on how to set default values for components by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 1788](https://github.com/gradio-app/gradio/pull/1788) +- Embedding Components on Docs by [@aliabd](https://github.com/aliabd) in [PR 1726](https://github.com/gradio-app/gradio/pull/1726) +- Remove usage of deprecated gr.inputs and gr.outputs from website by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 1796](https://github.com/gradio-app/gradio/pull/1796) +- Some cleanups to the docs page by [@abidlabs](https://github.com/abidlabs) in [PR 1822](https://github.com/gradio-app/gradio/pull/1822) + +### Contributors Shoutout: + +- [@nhankiet](https://github.com/nhankiet) made their first contribution in [PR 1819](https://github.com/gradio-app/gradio/pull/1819) + +## 3.0 + +###### 🔥 Gradio 3.0 is the biggest update to the library, ever. + +### New Features: + +###### 1. Blocks 🧱 + +Blocks is a new, low-level API that allows you to have full control over the data flows and layout of your application. It allows you to build very complex, multi-step applications. For example, you might want to: + +- Group together related demos as multiple tabs in one web app +- Change the layout of your demo instead of just having all of the inputs on the left and outputs on the right +- Have multi-step interfaces, in which the output of one model becomes the input to the next model, or have more flexible data flows in general +- Change a component's properties (for example, the choices in a Dropdown) or its visibility based on user input + +Here's a simple example that creates the demo below it: + +```python +import gradio as gr + +def update(name): + return f"Welcome to Gradio, {name}!" 
+ +demo = gr.Blocks() + +with demo: + gr.Markdown( + """ + # Hello World! + Start typing below to see the output. + """) + inp = gr.Textbox(placeholder="What is your name?") + out = gr.Textbox() + + inp.change(fn=update, + inputs=inp, + outputs=out) + +demo.launch() +``` + +![hello-blocks](https://user-images.githubusercontent.com/9021060/168684108-78cbd24b-e6bd-4a04-a8d9-20d535203434.gif) + +Read our [Introduction to Blocks](http://gradio.app/introduction_to_blocks/) guide for more, and join the 🎈 [Gradio Blocks Party](https://huggingface.co/spaces/Gradio-Blocks/README)! + +###### 2. Our Revamped Design 🎨 + +We've upgraded our design across the entire library: from components, and layouts all the way to dark mode. + +![kitchen_sink](https://user-images.githubusercontent.com/9021060/168686333-7a6e3096-3e23-4309-abf2-5cd7736e0463.gif) + +###### 3. A New Website 💻 + +We've upgraded [gradio.app](https://gradio.app) to make it cleaner, faster and easier to use. Our docs now come with components and demos embedded directly on the page. So you can quickly get up to speed with what you're looking for. + +![website](https://user-images.githubusercontent.com/9021060/168687191-10d6a3bd-101f-423a-8193-48f47a5e077d.gif) + +###### 4. New Components: Model3D, Dataset, and More.. + +We've introduced a lot of new components in `3.0`, including `Model3D`, `Dataset`, `Markdown`, `Button` and `Gallery`. You can find all the components and play around with them [here](https://gradio.app/docs/#components). 
+ +![Model3d](https://user-images.githubusercontent.com/9021060/168689062-6ad77151-8cc5-467d-916c-f7c78e52ec0c.gif) + +### Full Changelog: + +- Gradio dash fe by [@pngwn](https://github.com/pngwn) in [PR 807](https://github.com/gradio-app/gradio/pull/807) +- Blocks components by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 765](https://github.com/gradio-app/gradio/pull/765) +- Blocks components V2 by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 843](https://github.com/gradio-app/gradio/pull/843) +- Blocks-Backend-Events by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 844](https://github.com/gradio-app/gradio/pull/844) +- Interfaces from Blocks by [@aliabid94](https://github.com/aliabid94) in [PR 849](https://github.com/gradio-app/gradio/pull/849) +- Blocks dev by [@aliabid94](https://github.com/aliabid94) in [PR 853](https://github.com/gradio-app/gradio/pull/853) +- Started updating demos to use the new `gradio.components` syntax by [@abidlabs](https://github.com/abidlabs) in [PR 848](https://github.com/gradio-app/gradio/pull/848) +- add test infra + add browser tests to CI by [@pngwn](https://github.com/pngwn) in [PR 852](https://github.com/gradio-app/gradio/pull/852) +- 854 textbox by [@pngwn](https://github.com/pngwn) in [PR 859](https://github.com/gradio-app/gradio/pull/859) +- Getting old Python unit tests to pass on `blocks-dev` by [@abidlabs](https://github.com/abidlabs) in [PR 861](https://github.com/gradio-app/gradio/pull/861) +- initialise chatbot with empty array of messages by [@pngwn](https://github.com/pngwn) in [PR 867](https://github.com/gradio-app/gradio/pull/867) +- add test for output to input by [@pngwn](https://github.com/pngwn) in [PR 866](https://github.com/gradio-app/gradio/pull/866) +- More Interface -> Blocks features by [@aliabid94](https://github.com/aliabid94) in [PR 864](https://github.com/gradio-app/gradio/pull/864) +- Fixing external.py in blocks-dev to reflect the new HF Spaces paths by 
[@abidlabs](https://github.com/abidlabs) in [PR 879](https://github.com/gradio-app/gradio/pull/879) +- backend_default_value_refactoring by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 871](https://github.com/gradio-app/gradio/pull/871) +- fix default_value by [@pngwn](https://github.com/pngwn) in [PR 869](https://github.com/gradio-app/gradio/pull/869) +- fix buttons by [@aliabid94](https://github.com/aliabid94) in [PR 883](https://github.com/gradio-app/gradio/pull/883) +- Checking and updating more demos to use 3.0 syntax by [@abidlabs](https://github.com/abidlabs) in [PR 892](https://github.com/gradio-app/gradio/pull/892) +- Blocks Tests by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 902](https://github.com/gradio-app/gradio/pull/902) +- Interface fix by [@pngwn](https://github.com/pngwn) in [PR 901](https://github.com/gradio-app/gradio/pull/901) +- Quick fix: Issue 893 by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 907](https://github.com/gradio-app/gradio/pull/907) +- 3d Image Component by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 775](https://github.com/gradio-app/gradio/pull/775) +- fix endpoint url in prod by [@pngwn](https://github.com/pngwn) in [PR 911](https://github.com/gradio-app/gradio/pull/911) +- rename Model3d to Image3D by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 912](https://github.com/gradio-app/gradio/pull/912) +- update pypi to 2.9.1 by [@abidlabs](https://github.com/abidlabs) in [PR 916](https://github.com/gradio-app/gradio/pull/916) +- blocks-with-fix by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 917](https://github.com/gradio-app/gradio/pull/917) +- Restore Interpretation, Live, Auth, Queueing by [@aliabid94](https://github.com/aliabid94) in [PR 915](https://github.com/gradio-app/gradio/pull/915) +- Allow `Blocks` instances to be used like a `Block` in other `Blocks` by [@abidlabs](https://github.com/abidlabs) in [PR 
919](https://github.com/gradio-app/gradio/pull/919) +- Redesign 1 by [@pngwn](https://github.com/pngwn) in [PR 918](https://github.com/gradio-app/gradio/pull/918) +- blocks-components-tests by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 904](https://github.com/gradio-app/gradio/pull/904) +- fix unit + browser tests by [@pngwn](https://github.com/pngwn) in [PR 926](https://github.com/gradio-app/gradio/pull/926) +- blocks-move-test-data by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 927](https://github.com/gradio-app/gradio/pull/927) +- remove debounce from form inputs by [@pngwn](https://github.com/pngwn) in [PR 932](https://github.com/gradio-app/gradio/pull/932) +- reimplement webcam video by [@pngwn](https://github.com/pngwn) in [PR 928](https://github.com/gradio-app/gradio/pull/928) +- blocks-move-test-data by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 941](https://github.com/gradio-app/gradio/pull/941) +- allow audio components to take a string value by [@pngwn](https://github.com/pngwn) in [PR 930](https://github.com/gradio-app/gradio/pull/930) +- static mode for textbox by [@pngwn](https://github.com/pngwn) in [PR 929](https://github.com/gradio-app/gradio/pull/929) +- fix file upload text by [@pngwn](https://github.com/pngwn) in [PR 931](https://github.com/gradio-app/gradio/pull/931) +- tabbed-interface-rewritten by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 958](https://github.com/gradio-app/gradio/pull/958) +- Gan demo fix by [@abidlabs](https://github.com/abidlabs) in [PR 965](https://github.com/gradio-app/gradio/pull/965) +- Blocks analytics by [@abidlabs](https://github.com/abidlabs) in [PR 947](https://github.com/gradio-app/gradio/pull/947) +- Blocks page load by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 963](https://github.com/gradio-app/gradio/pull/963) +- add frontend for page load events by [@pngwn](https://github.com/pngwn) in [PR 967](https://github.com/gradio-app/gradio/pull/967) +- 
fix i18n and some tweaks by [@pngwn](https://github.com/pngwn) in [PR 966](https://github.com/gradio-app/gradio/pull/966) +- add jinja2 to reqs by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 969](https://github.com/gradio-app/gradio/pull/969) +- Cleaning up `Launchable()` by [@abidlabs](https://github.com/abidlabs) in [PR 968](https://github.com/gradio-app/gradio/pull/968) +- Fix #944 by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 971](https://github.com/gradio-app/gradio/pull/971) +- New Blocks Demo: neural instrument cloning by [@abidlabs](https://github.com/abidlabs) in [PR 975](https://github.com/gradio-app/gradio/pull/975) +- Add huggingface_hub client library by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 973](https://github.com/gradio-app/gradio/pull/973) +- State and variables by [@aliabid94](https://github.com/aliabid94) in [PR 977](https://github.com/gradio-app/gradio/pull/977) +- update-components by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 986](https://github.com/gradio-app/gradio/pull/986) +- ensure dataframe updates as expected by [@pngwn](https://github.com/pngwn) in [PR 981](https://github.com/gradio-app/gradio/pull/981) +- test-guideline by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 990](https://github.com/gradio-app/gradio/pull/990) +- Issue #785: add footer by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 972](https://github.com/gradio-app/gradio/pull/972) +- indentation fix by [@abidlabs](https://github.com/abidlabs) in [PR 993](https://github.com/gradio-app/gradio/pull/993) +- missing quote by [@aliabd](https://github.com/aliabd) in [PR 996](https://github.com/gradio-app/gradio/pull/996) +- added interactive parameter to components by [@abidlabs](https://github.com/abidlabs) in [PR 992](https://github.com/gradio-app/gradio/pull/992) +- custom-components by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 985](https://github.com/gradio-app/gradio/pull/985) +- 
Refactor component shortcuts by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 995](https://github.com/gradio-app/gradio/pull/995) +- Plot Component by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 805](https://github.com/gradio-app/gradio/pull/805) +- updated PyPi version to 2.9.2 by [@abidlabs](https://github.com/abidlabs) in [PR 1002](https://github.com/gradio-app/gradio/pull/1002) +- Release 2.9.3 by [@abidlabs](https://github.com/abidlabs) in [PR 1003](https://github.com/gradio-app/gradio/pull/1003) +- Image3D Examples Fix by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 1001](https://github.com/gradio-app/gradio/pull/1001) +- release 2.9.4 by [@abidlabs](https://github.com/abidlabs) in [PR 1006](https://github.com/gradio-app/gradio/pull/1006) +- templates import hotfix by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1008](https://github.com/gradio-app/gradio/pull/1008) +- Progress indicator bar by [@aliabid94](https://github.com/aliabid94) in [PR 997](https://github.com/gradio-app/gradio/pull/997) +- Fixed image input for absolute path by [@JefferyChiang](https://github.com/JefferyChiang) in [PR 1004](https://github.com/gradio-app/gradio/pull/1004) +- Model3D + Plot Components by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 1010](https://github.com/gradio-app/gradio/pull/1010) +- Gradio Guides: Creating CryptoPunks with GANs by [@NimaBoscarino](https://github.com/NimaBoscarino) in [PR 1000](https://github.com/gradio-app/gradio/pull/1000) +- [BIG PR] Gradio blocks & redesigned components by [@abidlabs](https://github.com/abidlabs) in [PR 880](https://github.com/gradio-app/gradio/pull/880) +- fixed failing test on main by [@abidlabs](https://github.com/abidlabs) in [PR 1023](https://github.com/gradio-app/gradio/pull/1023) +- Use smaller ASR model in external test by [@abidlabs](https://github.com/abidlabs) in [PR 1024](https://github.com/gradio-app/gradio/pull/1024) +- updated PyPi version to 2.9.0b by 
[@abidlabs](https://github.com/abidlabs) in [PR 1026](https://github.com/gradio-app/gradio/pull/1026) +- Fixing import issues so that the package successfully installs on colab notebooks by [@abidlabs](https://github.com/abidlabs) in [PR 1027](https://github.com/gradio-app/gradio/pull/1027) +- Update website tracker slackbot by [@aliabd](https://github.com/aliabd) in [PR 1037](https://github.com/gradio-app/gradio/pull/1037) +- textbox-autoheight by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1009](https://github.com/gradio-app/gradio/pull/1009) +- Model3D Examples fixes by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 1035](https://github.com/gradio-app/gradio/pull/1035) +- GAN Gradio Guide: Adjustments to iframe heights by [@NimaBoscarino](https://github.com/NimaBoscarino) in [PR 1042](https://github.com/gradio-app/gradio/pull/1042) +- added better default labels to form components by [@abidlabs](https://github.com/abidlabs) in [PR 1040](https://github.com/gradio-app/gradio/pull/1040) +- Slackbot web tracker fix by [@aliabd](https://github.com/aliabd) in [PR 1043](https://github.com/gradio-app/gradio/pull/1043) +- Plot fixes by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 1044](https://github.com/gradio-app/gradio/pull/1044) +- Small fixes to the demos by [@abidlabs](https://github.com/abidlabs) in [PR 1030](https://github.com/gradio-app/gradio/pull/1030) +- fixing demo issue with website by [@aliabd](https://github.com/aliabd) in [PR 1047](https://github.com/gradio-app/gradio/pull/1047) +- [hotfix] HighlightedText by [@aliabid94](https://github.com/aliabid94) in [PR 1046](https://github.com/gradio-app/gradio/pull/1046) +- Update text by [@ronvoluted](https://github.com/ronvoluted) in [PR 1050](https://github.com/gradio-app/gradio/pull/1050) +- Update CONTRIBUTING.md by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1052](https://github.com/gradio-app/gradio/pull/1052) +- fix(ui): Increase contrast for footer by 
[@ronvoluted](https://github.com/ronvoluted) in [PR 1048](https://github.com/gradio-app/gradio/pull/1048) +- UI design update by [@gary149](https://github.com/gary149) in [PR 1041](https://github.com/gradio-app/gradio/pull/1041) +- updated PyPi version to 2.9.0b8 by [@abidlabs](https://github.com/abidlabs) in [PR 1059](https://github.com/gradio-app/gradio/pull/1059) +- Running, testing, and fixing demos by [@abidlabs](https://github.com/abidlabs) in [PR 1060](https://github.com/gradio-app/gradio/pull/1060) +- Form layout by [@pngwn](https://github.com/pngwn) in [PR 1054](https://github.com/gradio-app/gradio/pull/1054) +- inputless-interfaces by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1038](https://github.com/gradio-app/gradio/pull/1038) +- Update PULL_REQUEST_TEMPLATE.md by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1068](https://github.com/gradio-app/gradio/pull/1068) +- Upgrading node memory to 4gb in website Docker by [@aliabd](https://github.com/aliabd) in [PR 1069](https://github.com/gradio-app/gradio/pull/1069) +- Website reload error by [@aliabd](https://github.com/aliabd) in [PR 1079](https://github.com/gradio-app/gradio/pull/1079) +- fixed favicon issue by [@abidlabs](https://github.com/abidlabs) in [PR 1064](https://github.com/gradio-app/gradio/pull/1064) +- remove-queue-from-events by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1056](https://github.com/gradio-app/gradio/pull/1056) +- Enable vertex colors for OBJs files by [@radames](https://github.com/radames) in [PR 1074](https://github.com/gradio-app/gradio/pull/1074) +- Dark text by [@ronvoluted](https://github.com/ronvoluted) in [PR 1049](https://github.com/gradio-app/gradio/pull/1049) +- Scroll to output by [@pngwn](https://github.com/pngwn) in [PR 1077](https://github.com/gradio-app/gradio/pull/1077) +- Explicitly list pnpm version 6 in contributing guide by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 
1085](https://github.com/gradio-app/gradio/pull/1085) +- hotfix for encrypt issue by [@abidlabs](https://github.com/abidlabs) in [PR 1096](https://github.com/gradio-app/gradio/pull/1096) +- Release 2.9b9 by [@abidlabs](https://github.com/abidlabs) in [PR 1098](https://github.com/gradio-app/gradio/pull/1098) +- tweak node circleci settings by [@pngwn](https://github.com/pngwn) in [PR 1091](https://github.com/gradio-app/gradio/pull/1091) +- Website Reload Error by [@aliabd](https://github.com/aliabd) in [PR 1099](https://github.com/gradio-app/gradio/pull/1099) +- Website Reload: README in demos docker by [@aliabd](https://github.com/aliabd) in [PR 1100](https://github.com/gradio-app/gradio/pull/1100) +- Flagging fixes by [@abidlabs](https://github.com/abidlabs) in [PR 1081](https://github.com/gradio-app/gradio/pull/1081) +- Backend for optional labels by [@abidlabs](https://github.com/abidlabs) in [PR 1080](https://github.com/gradio-app/gradio/pull/1080) +- Optional labels fe by [@pngwn](https://github.com/pngwn) in [PR 1105](https://github.com/gradio-app/gradio/pull/1105) +- clean-deprecated-parameters by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1090](https://github.com/gradio-app/gradio/pull/1090) +- Blocks rendering fix by [@abidlabs](https://github.com/abidlabs) in [PR 1102](https://github.com/gradio-app/gradio/pull/1102) +- Redos #1106 by [@abidlabs](https://github.com/abidlabs) in [PR 1112](https://github.com/gradio-app/gradio/pull/1112) +- Interface types: handle input-only, output-only, and unified interfaces by [@abidlabs](https://github.com/abidlabs) in [PR 1108](https://github.com/gradio-app/gradio/pull/1108) +- Hotfix + New pypi release 2.9b11 by [@abidlabs](https://github.com/abidlabs) in [PR 1118](https://github.com/gradio-app/gradio/pull/1118) +- issue-checkbox by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1122](https://github.com/gradio-app/gradio/pull/1122) +- issue-checkbox-hotfix by 
[@FarukOzderim](https://github.com/FarukOzderim) in [PR 1127](https://github.com/gradio-app/gradio/pull/1127) +- Fix demos in website by [@aliabd](https://github.com/aliabd) in [PR 1130](https://github.com/gradio-app/gradio/pull/1130) +- Guide for Gradio ONNX model zoo on Huggingface by [@AK391](https://github.com/AK391) in [PR 1073](https://github.com/gradio-app/gradio/pull/1073) +- ONNX guide fixes by [@aliabd](https://github.com/aliabd) in [PR 1131](https://github.com/gradio-app/gradio/pull/1131) +- Stacked form inputs css by [@gary149](https://github.com/gary149) in [PR 1134](https://github.com/gradio-app/gradio/pull/1134) +- made default value in textbox empty string by [@abidlabs](https://github.com/abidlabs) in [PR 1135](https://github.com/gradio-app/gradio/pull/1135) +- Examples UI by [@gary149](https://github.com/gary149) in [PR 1121](https://github.com/gradio-app/gradio/pull/1121) +- Chatbot custom color support by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 1092](https://github.com/gradio-app/gradio/pull/1092) +- highlighted text colors by [@pngwn](https://github.com/pngwn) in [PR 1119](https://github.com/gradio-app/gradio/pull/1119) +- pin to pnpm 6 for now by [@pngwn](https://github.com/pngwn) in [PR 1147](https://github.com/gradio-app/gradio/pull/1147) +- Restore queue in Blocks by [@aliabid94](https://github.com/aliabid94) in [PR 1137](https://github.com/gradio-app/gradio/pull/1137) +- add select event for tabitems by [@pngwn](https://github.com/pngwn) in [PR 1154](https://github.com/gradio-app/gradio/pull/1154) +- max_lines + autoheight for textbox by [@pngwn](https://github.com/pngwn) in [PR 1153](https://github.com/gradio-app/gradio/pull/1153) +- use color palette for chatbot by [@pngwn](https://github.com/pngwn) in [PR 1152](https://github.com/gradio-app/gradio/pull/1152) +- Timeseries improvements by [@pngwn](https://github.com/pngwn) in [PR 1149](https://github.com/gradio-app/gradio/pull/1149) +- move styling for interface panels to 
frontend by [@pngwn](https://github.com/pngwn) in [PR 1146](https://github.com/gradio-app/gradio/pull/1146) +- html tweaks by [@pngwn](https://github.com/pngwn) in [PR 1145](https://github.com/gradio-app/gradio/pull/1145) +- Issue #768: Support passing none to resize and crop image by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 1144](https://github.com/gradio-app/gradio/pull/1144) +- image gallery component + img css by [@aliabid94](https://github.com/aliabid94) in [PR 1140](https://github.com/gradio-app/gradio/pull/1140) +- networking tweak by [@abidlabs](https://github.com/abidlabs) in [PR 1143](https://github.com/gradio-app/gradio/pull/1143) +- Allow enabling queue per event listener by [@aliabid94](https://github.com/aliabid94) in [PR 1155](https://github.com/gradio-app/gradio/pull/1155) +- config hotfix and v. 2.9b23 by [@abidlabs](https://github.com/abidlabs) in [PR 1158](https://github.com/gradio-app/gradio/pull/1158) +- Custom JS calls by [@aliabid94](https://github.com/aliabid94) in [PR 1082](https://github.com/gradio-app/gradio/pull/1082) +- Small fixes: queue default fix, ffmpeg installation message by [@abidlabs](https://github.com/abidlabs) in [PR 1159](https://github.com/gradio-app/gradio/pull/1159) +- formatting by [@abidlabs](https://github.com/abidlabs) in [PR 1161](https://github.com/gradio-app/gradio/pull/1161) +- enable flex grow for gr-box by [@radames](https://github.com/radames) in [PR 1165](https://github.com/gradio-app/gradio/pull/1165) +- 1148 loading by [@pngwn](https://github.com/pngwn) in [PR 1164](https://github.com/gradio-app/gradio/pull/1164) +- Put enable_queue kwarg back in launch() by [@aliabid94](https://github.com/aliabid94) in [PR 1167](https://github.com/gradio-app/gradio/pull/1167) +- A few small fixes by [@abidlabs](https://github.com/abidlabs) in [PR 1171](https://github.com/gradio-app/gradio/pull/1171) +- Hotfix for dropdown component by [@abidlabs](https://github.com/abidlabs) in [PR 
1172](https://github.com/gradio-app/gradio/pull/1172) +- use secondary buttons in interface by [@pngwn](https://github.com/pngwn) in [PR 1173](https://github.com/gradio-app/gradio/pull/1173) +- 1183 component height by [@pngwn](https://github.com/pngwn) in [PR 1185](https://github.com/gradio-app/gradio/pull/1185) +- 962 dataframe by [@pngwn](https://github.com/pngwn) in [PR 1186](https://github.com/gradio-app/gradio/pull/1186) +- update-contributing by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1188](https://github.com/gradio-app/gradio/pull/1188) +- Table tweaks by [@pngwn](https://github.com/pngwn) in [PR 1195](https://github.com/gradio-app/gradio/pull/1195) +- wrap tab content in column by [@pngwn](https://github.com/pngwn) in [PR 1200](https://github.com/gradio-app/gradio/pull/1200) +- WIP: Add dark mode support by [@gary149](https://github.com/gary149) in [PR 1187](https://github.com/gradio-app/gradio/pull/1187) +- Restored /api/predict/ endpoint for Interfaces by [@abidlabs](https://github.com/abidlabs) in [PR 1199](https://github.com/gradio-app/gradio/pull/1199) +- hltext-label by [@pngwn](https://github.com/pngwn) in [PR 1204](https://github.com/gradio-app/gradio/pull/1204) +- add copy functionality to json by [@pngwn](https://github.com/pngwn) in [PR 1205](https://github.com/gradio-app/gradio/pull/1205) +- Update component config by [@aliabid94](https://github.com/aliabid94) in [PR 1089](https://github.com/gradio-app/gradio/pull/1089) +- fix placeholder prompt by [@pngwn](https://github.com/pngwn) in [PR 1215](https://github.com/gradio-app/gradio/pull/1215) +- ensure webcam video value is propagated correctly by [@pngwn](https://github.com/pngwn) in [PR 1218](https://github.com/gradio-app/gradio/pull/1218) +- Automatic word-break in highlighted text, combine_adjacent support by [@aliabid94](https://github.com/aliabid94) in [PR 1209](https://github.com/gradio-app/gradio/pull/1209) +- async-function-support by 
[@FarukOzderim](https://github.com/FarukOzderim) in [PR 1190](https://github.com/gradio-app/gradio/pull/1190) +- Sharing fix for assets by [@aliabid94](https://github.com/aliabid94) in [PR 1208](https://github.com/gradio-app/gradio/pull/1208) +- Hotfixes for course demos by [@abidlabs](https://github.com/abidlabs) in [PR 1222](https://github.com/gradio-app/gradio/pull/1222) +- Allow Custom CSS by [@aliabid94](https://github.com/aliabid94) in [PR 1170](https://github.com/gradio-app/gradio/pull/1170) +- share-hotfix by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1226](https://github.com/gradio-app/gradio/pull/1226) +- tweaks by [@pngwn](https://github.com/pngwn) in [PR 1229](https://github.com/gradio-app/gradio/pull/1229) +- white space for class concatenation by [@radames](https://github.com/radames) in [PR 1228](https://github.com/gradio-app/gradio/pull/1228) +- Tweaks by [@pngwn](https://github.com/pngwn) in [PR 1230](https://github.com/gradio-app/gradio/pull/1230) +- css tweaks by [@pngwn](https://github.com/pngwn) in [PR 1235](https://github.com/gradio-app/gradio/pull/1235) +- ensure defaults height match for media inputs by [@pngwn](https://github.com/pngwn) in [PR 1236](https://github.com/gradio-app/gradio/pull/1236) +- Default Label label value by [@radames](https://github.com/radames) in [PR 1239](https://github.com/gradio-app/gradio/pull/1239) +- update-shortcut-syntax by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1234](https://github.com/gradio-app/gradio/pull/1234) +- Update version.txt by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1244](https://github.com/gradio-app/gradio/pull/1244) +- Layout bugs by [@pngwn](https://github.com/pngwn) in [PR 1246](https://github.com/gradio-app/gradio/pull/1246) +- Update demo by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 1253](https://github.com/gradio-app/gradio/pull/1253) +- Button default name by [@FarukOzderim](https://github.com/FarukOzderim) in [PR 
1243](https://github.com/gradio-app/gradio/pull/1243) +- Labels spacing by [@gary149](https://github.com/gary149) in [PR 1254](https://github.com/gradio-app/gradio/pull/1254) +- add global loader for gradio app by [@pngwn](https://github.com/pngwn) in [PR 1251](https://github.com/gradio-app/gradio/pull/1251) +- ui apis for dalle-mini by [@pngwn](https://github.com/pngwn) in [PR 1258](https://github.com/gradio-app/gradio/pull/1258) +- Add precision to Number, backend only by [@freddyaboulton](https://github.com/freddyaboulton) in [PR 1125](https://github.com/gradio-app/gradio/pull/1125) +- Website Design Changes by [@abidlabs](https://github.com/abidlabs) in [PR 1015](https://github.com/gradio-app/gradio/pull/1015) +- Small fixes for multiple demos compatible with 3.0 by [@radames](https://github.com/radames) in [PR 1257](https://github.com/gradio-app/gradio/pull/1257) +- Issue #1160: Model 3D component not destroyed correctly by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 1219](https://github.com/gradio-app/gradio/pull/1219) +- Fixes to components by [@abidlabs](https://github.com/abidlabs) in [PR 1260](https://github.com/gradio-app/gradio/pull/1260) +- layout docs by [@abidlabs](https://github.com/abidlabs) in [PR 1263](https://github.com/gradio-app/gradio/pull/1263) +- Static forms by [@pngwn](https://github.com/pngwn) in [PR 1264](https://github.com/gradio-app/gradio/pull/1264) +- Cdn assets by [@pngwn](https://github.com/pngwn) in [PR 1265](https://github.com/gradio-app/gradio/pull/1265) +- update logo by [@gary149](https://github.com/gary149) in [PR 1266](https://github.com/gradio-app/gradio/pull/1266) +- fix slider by [@aliabid94](https://github.com/aliabid94) in [PR 1268](https://github.com/gradio-app/gradio/pull/1268) +- maybe fix auth in iframes by [@pngwn](https://github.com/pngwn) in [PR 1261](https://github.com/gradio-app/gradio/pull/1261) +- Improves "Getting Started" guide by [@abidlabs](https://github.com/abidlabs) in [PR 
1269](https://github.com/gradio-app/gradio/pull/1269) +- Add embedded demos to website by [@aliabid94](https://github.com/aliabid94) in [PR 1270](https://github.com/gradio-app/gradio/pull/1270) +- Label hotfixes by [@abidlabs](https://github.com/abidlabs) in [PR 1281](https://github.com/gradio-app/gradio/pull/1281) +- General tweaks by [@pngwn](https://github.com/pngwn) in [PR 1276](https://github.com/gradio-app/gradio/pull/1276) +- only affect links within the document by [@pngwn](https://github.com/pngwn) in [PR 1282](https://github.com/gradio-app/gradio/pull/1282) +- release 3.0b9 by [@abidlabs](https://github.com/abidlabs) in [PR 1283](https://github.com/gradio-app/gradio/pull/1283) +- Dm by [@pngwn](https://github.com/pngwn) in [PR 1284](https://github.com/gradio-app/gradio/pull/1284) +- Website fixes by [@aliabd](https://github.com/aliabd) in [PR 1286](https://github.com/gradio-app/gradio/pull/1286) +- Create Streamables by [@aliabid94](https://github.com/aliabid94) in [PR 1279](https://github.com/gradio-app/gradio/pull/1279) +- ensure table works on mobile by [@pngwn](https://github.com/pngwn) in [PR 1277](https://github.com/gradio-app/gradio/pull/1277) +- changes by [@aliabid94](https://github.com/aliabid94) in [PR 1287](https://github.com/gradio-app/gradio/pull/1287) +- demo alignment on landing page by [@aliabd](https://github.com/aliabd) in [PR 1288](https://github.com/gradio-app/gradio/pull/1288) +- New meta img by [@aliabd](https://github.com/aliabd) in [PR 1289](https://github.com/gradio-app/gradio/pull/1289) +- updated PyPi version to 3.0 by [@abidlabs](https://github.com/abidlabs) in [PR 1290](https://github.com/gradio-app/gradio/pull/1290) +- Fix site by [@aliabid94](https://github.com/aliabid94) in [PR 1291](https://github.com/gradio-app/gradio/pull/1291) +- Mobile responsive guides by [@aliabd](https://github.com/aliabd) in [PR 1293](https://github.com/gradio-app/gradio/pull/1293) +- Update readme by [@abidlabs](https://github.com/abidlabs) in 
[PR 1292](https://github.com/gradio-app/gradio/pull/1292) +- gif by [@abidlabs](https://github.com/abidlabs) in [PR 1296](https://github.com/gradio-app/gradio/pull/1296) +- Allow decoding headerless b64 string by [@1lint](https://github.com/1lint) in [PR 4031](https://github.com/gradio-app/gradio/pull/4031) + +### Contributors Shoutout: + +- [@JefferyChiang](https://github.com/JefferyChiang) made their first contribution in [PR 1004](https://github.com/gradio-app/gradio/pull/1004) +- [@NimaBoscarino](https://github.com/NimaBoscarino) made their first contribution in [PR 1000](https://github.com/gradio-app/gradio/pull/1000) +- [@ronvoluted](https://github.com/ronvoluted) made their first contribution in [PR 1050](https://github.com/gradio-app/gradio/pull/1050) +- [@radames](https://github.com/radames) made their first contribution in [PR 1074](https://github.com/gradio-app/gradio/pull/1074) +- [@freddyaboulton](https://github.com/freddyaboulton) made their first contribution in [PR 1085](https://github.com/gradio-app/gradio/pull/1085) +- [@liteli1987gmail](https://github.com/liteli1987gmail) & [@chenglu](https://github.com/chenglu) made their first contribution in [PR 4767](https://github.com/gradio-app/gradio/pull/4767) \ No newline at end of file diff --git a/testbed/gradio-app__gradio/gradio/__init__.py b/testbed/gradio-app__gradio/gradio/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2cefd271dadbc64fed19740306872384166f6a4c --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/__init__.py @@ -0,0 +1,108 @@ +import json + +import gradio.components as components +import gradio.inputs as inputs +import gradio.outputs as outputs +import gradio.processing_utils +import gradio.templates +import gradio.themes as themes +from gradio.blocks import Blocks +from gradio.chat_interface import ChatInterface +from gradio.components import ( + HTML, + JSON, + AnnotatedImage, + Annotatedimage, + Audio, + BarPlot, + Button, + Carousel, + Chatbot, + 
Checkbox, + CheckboxGroup, + Checkboxgroup, + ClearButton, + Code, + ColorPicker, + DataFrame, + Dataframe, + Dataset, + Dropdown, + DuplicateButton, + File, + Gallery, + Highlight, + HighlightedText, + Highlightedtext, + Image, + Interpretation, + Json, + Label, + LinePlot, + LoginButton, + LogoutButton, + Markdown, + Model3D, + Number, + Plot, + Radio, + ScatterPlot, + Slider, + State, + StatusTracker, + Text, + Textbox, + TimeSeries, + Timeseries, + UploadButton, + Variable, + Video, + component, +) +from gradio.deploy_space import deploy +from gradio.events import LikeData, SelectData, on +from gradio.exceptions import Error +from gradio.external import load +from gradio.flagging import ( + CSVLogger, + FlaggingCallback, + HuggingFaceDatasetJSONSaver, + HuggingFaceDatasetSaver, + SimpleCSVLogger, +) +from gradio.helpers import ( + EventData, + Info, + Progress, + Warning, + make_waveform, + skip, + update, +) +from gradio.helpers import create_examples as Examples # noqa: N812 +from gradio.interface import Interface, TabbedInterface, close_all +from gradio.ipython_ext import load_ipython_extension +from gradio.layouts import Accordion, Box, Column, Group, Row, Tab, TabItem, Tabs +from gradio.mix import Parallel, Series +from gradio.oauth import OAuthProfile +from gradio.routes import Request, mount_gradio_app +from gradio.templates import ( + Files, + ImageMask, + ImagePaint, + List, + Matrix, + Mic, + Microphone, + Numpy, + Paint, + Pil, + PlayableVideo, + Sketchpad, + TextArea, + Webcam, +) +from gradio.themes import Base as Theme +from gradio.utils import get_package_version + +__version__ = get_package_version() diff --git a/testbed/gradio-app__gradio/gradio/blocks.py b/testbed/gradio-app__gradio/gradio/blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..b6dc9828b6952d742669cafccdddfd4db9e38c5f --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/blocks.py @@ -0,0 +1,2398 @@ +from __future__ import annotations + +import copy 
+import inspect +import json +import os +import random +import secrets +import sys +import threading +import time +import warnings +import webbrowser +from abc import abstractmethod +from collections import defaultdict +from functools import wraps +from pathlib import Path +from types import ModuleType +from typing import TYPE_CHECKING, Any, AsyncIterator, Callable, Literal, Sequence, cast + +import anyio +import requests +from anyio import CapacityLimiter +from gradio_client import serializing +from gradio_client import utils as client_utils +from gradio_client.documentation import document, set_documentation_group +from packaging import version + +from gradio import ( + analytics, + components, + external, + networking, + queueing, + routes, + strings, + themes, + utils, + wasm_utils, +) +from gradio.context import Context +from gradio.deprecation import check_deprecated_parameters, warn_deprecation +from gradio.exceptions import ( + DuplicateBlockError, + InvalidApiNameError, + InvalidBlockError, +) +from gradio.helpers import EventData, create_tracker, skip, special_args +from gradio.state_holder import SessionState +from gradio.themes import Default as DefaultTheme +from gradio.themes import ThemeClass as Theme +from gradio.tunneling import ( + BINARY_FILENAME, + BINARY_FOLDER, + BINARY_PATH, + BINARY_URL, + CURRENT_TUNNELS, +) +from gradio.utils import ( + TupleNoPrint, + check_function_inputs_match, + component_or_layout_class, + concurrency_count_warning, + get_cancel_function, + get_continuous_fn, + get_package_version, +) + +try: + import spaces # type: ignore +except Exception: + spaces = None + +set_documentation_group("blocks") + +if TYPE_CHECKING: # Only import for type checking (is False at runtime). 
+ from fastapi.applications import FastAPI + + from gradio.components import Component + from gradio.events import EventListenerMethod + +BUILT_IN_THEMES: dict[str, Theme] = { + t.name: t + for t in [ + themes.Base(), + themes.Default(), + themes.Monochrome(), + themes.Soft(), + themes.Glass(), + ] +} + + +def in_event_listener(): + from gradio.context import LocalContext + + return LocalContext.in_event_listener.get() + + +def updateable(fn): + @wraps(fn) + def wrapper(*args, **kwargs): + fn_args = inspect.getfullargspec(fn).args + self = args[0] + for i, arg in enumerate(args): + if i == 0 or i >= len(fn_args): # skip self, *args + continue + arg_name = fn_args[i] + kwargs[arg_name] = arg + self.constructor_args = kwargs + if in_event_listener(): + return None + else: + return fn(self, **kwargs) + + return wrapper + + +updated_cls_set = set() + + +class Updateable: + def __new__(cls, *args, **kwargs): + if cls not in updated_cls_set: + cls.__init__ = updateable(cls.__init__) + updated_cls_set.add(cls) + return super().__new__(cls) + + +class Block: + def __init__( + self, + *, + render: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + visible: bool = True, + root_url: str | None = None, # URL that is prepended to all file paths + _skip_init_processing: bool = False, # Used for loading from Spaces + **kwargs, + ): + self._id = Context.id + Context.id += 1 + self.visible = visible + self.elem_id = elem_id + self.elem_classes = ( + [elem_classes] if isinstance(elem_classes, str) else elem_classes + ) + self.root_url = root_url + self.share_token = secrets.token_urlsafe(32) + self._skip_init_processing = _skip_init_processing + self.parent: BlockContext | None = None + self.is_rendered: bool = False + self.constructor_args: dict + self.state_session_capacity = 10000 + + if render: + self.render() + check_deprecated_parameters(self.__class__.__name__, kwargs=kwargs) + + def render(self): + """ + Adds self into appropriate 
BlockContext + """ + if Context.root_block is not None and self._id in Context.root_block.blocks: + raise DuplicateBlockError( + f"A block with id: {self._id} has already been rendered in the current Blocks." + ) + if Context.block is not None: + Context.block.add(self) + if Context.root_block is not None: + Context.root_block.blocks[self._id] = self + self.is_rendered = True + if isinstance(self, components.IOComponent): + Context.root_block.temp_file_sets.append(self.temp_files) + return self + + def unrender(self): + """ + Removes self from BlockContext if it has been rendered (otherwise does nothing). + Removes self from the layout and collection of blocks, but does not delete any event triggers. + """ + if Context.block is not None: + try: + Context.block.children.remove(self) + except ValueError: + pass + if Context.root_block is not None: + try: + del Context.root_block.blocks[self._id] + self.is_rendered = False + except KeyError: + pass + return self + + def get_block_name(self) -> str: + """ + Gets block's class name. + + If it is template component it gets the parent's class name. 
+ + @return: class name + """ + return ( + self.__class__.__base__.__name__.lower() + if hasattr(self, "is_template") + else self.__class__.__name__.lower() + ) + + def get_expected_parent(self) -> type[BlockContext] | None: + return None + + def get_config(self): + config = {} + signature = inspect.signature(self.__class__.__init__) + for parameter in signature.parameters.values(): + if hasattr(self, parameter.name): + value = getattr(self, parameter.name) + config[parameter.name] = value + return {**config, "root_url": self.root_url, "name": self.get_block_name()} + + @staticmethod + @abstractmethod + def update(**kwargs) -> dict: + return {} + + @classmethod + def get_specific_update(cls, generic_update: dict[str, Any]) -> dict: + generic_update = generic_update.copy() + del generic_update["__type__"] + specific_update = cls.update(**generic_update) + specific_update = utils.delete_none(specific_update, skip_value=True) + return specific_update + + +class BlockContext(Block): + def __init__( + self, + visible: bool = True, + render: bool = True, + **kwargs, + ): + """ + Parameters: + visible: If False, this will be hidden but included in the Blocks config file (its visibility can later be updated). + render: If False, this will not be included in the Blocks config file at all. 
+ """ + self.children: list[Block] = [] + Block.__init__(self, visible=visible, render=render, **kwargs) + + def add_child(self, child: Block): + self.children.append(child) + + def __enter__(self): + self.parent = Context.block + Context.block = self + return self + + def add(self, child: Block): + child.parent = self + self.children.append(child) + + def fill_expected_parents(self): + children = [] + pseudo_parent = None + for child in self.children: + expected_parent = child.get_expected_parent() + if not expected_parent or isinstance(self, expected_parent): + pseudo_parent = None + children.append(child) + else: + if pseudo_parent is not None and isinstance( + pseudo_parent, expected_parent + ): + pseudo_parent.add_child(child) + else: + pseudo_parent = expected_parent(render=False) + pseudo_parent.parent = self + children.append(pseudo_parent) + pseudo_parent.add_child(child) + if Context.root_block: + Context.root_block.blocks[pseudo_parent._id] = pseudo_parent + child.parent = pseudo_parent + self.children = children + + def __exit__(self, exc_type: type[BaseException] | None = None, *args): + Context.block = self.parent + if exc_type is not None: + return + if getattr(self, "allow_expected_parents", True): + self.fill_expected_parents() + + def postprocess(self, y): + """ + Any postprocessing needed to be performed on a block context. 
+ """ + return y + + +class BlockFunction: + def __init__( + self, + fn: Callable | None, + inputs: list[Component], + outputs: list[Component], + preprocess: bool, + postprocess: bool, + inputs_as_dict: bool, + tracks_progress: bool = False, + ): + self.fn = fn + self.inputs = inputs + self.outputs = outputs + self.preprocess = preprocess + self.postprocess = postprocess + self.tracks_progress = tracks_progress + self.total_runtime = 0 + self.total_runs = 0 + self.inputs_as_dict = inputs_as_dict + self.name = getattr(fn, "__name__", "fn") if fn is not None else None + self.spaces_auto_wrap() + + def spaces_auto_wrap(self): + if spaces is None: + return + if utils.get_space() is None: + return + self.fn = spaces.gradio_auto_wrap(self.fn) + + def __str__(self): + return str( + { + "fn": self.name, + "preprocess": self.preprocess, + "postprocess": self.postprocess, + } + ) + + def __repr__(self): + return str(self) + + +def postprocess_update_dict(block: Block, update_dict: dict, postprocess: bool = True): + """ + Converts a dictionary of updates into a format that can be sent to the frontend. + E.g. {"__type__": "generic_update", "value": "2", "interactive": False} + Into -> {"__type__": "update", "value": 2.0, "mode": "static"} + + Parameters: + block: The Block that is being updated with this update dictionary. + update_dict: The original update dictionary + postprocess: Whether to postprocess the "value" key of the update dictionary. 
+ """ + if update_dict.get("__type__", "") == "generic_update": + update_dict = block.get_specific_update(update_dict) + if update_dict.get("value") is components._Keywords.NO_VALUE: + update_dict.pop("value") + interactive = update_dict.pop("interactive", None) + if interactive is not None: + update_dict["mode"] = "dynamic" if interactive else "static" + attr_dict = { + k: getattr(block, k) if hasattr(block, k) else v for k, v in update_dict.items() + } + attr_dict["__type__"] = "update" + attr_dict.pop("value", None) + if "value" in update_dict: + assert isinstance( + block, components.IOComponent + ), f"Component {block.__class__} does not support value" + if postprocess: + attr_dict["value"] = block.postprocess(update_dict["value"]) + else: + attr_dict["value"] = update_dict["value"] + return attr_dict + + +def convert_component_dict_to_list( + outputs_ids: list[int], predictions: dict +) -> list | dict: + """ + Converts a dictionary of component updates into a list of updates in the order of + the outputs_ids and including every output component. Leaves other types of dictionaries unchanged. + E.g. {"textbox": "hello", "number": {"__type__": "generic_update", "value": "2"}} + Into -> ["hello", {"__type__": "generic_update"}, {"__type__": "generic_update", "value": "2"}] + """ + keys_are_blocks = [isinstance(key, Block) for key in predictions] + if all(keys_are_blocks): + reordered_predictions = [skip() for _ in outputs_ids] + for component, value in predictions.items(): + if component._id not in outputs_ids: + raise ValueError( + f"Returned component {component} not specified as output of function." + ) + output_index = outputs_ids.index(component._id) + reordered_predictions[output_index] = value + predictions = utils.resolve_singleton(reordered_predictions) + elif any(keys_are_blocks): + raise ValueError( + "Returned dictionary included some keys as Components. 
Either all keys must be Components to assign Component values, or return a List of values to assign output values in order." + ) + return predictions + + +def get_api_info(config: dict, serialize: bool = True): + """ + Gets the information needed to generate the API docs from a Blocks config. + Parameters: + config: a Blocks config dictionary + serialize: If True, returns the serialized version of the typed information. If False, returns the raw version. + """ + api_info = {"named_endpoints": {}, "unnamed_endpoints": {}} + mode = config.get("mode", None) + after_new_format = version.parse(config.get("version", "2.0")) > version.Version( + "3.28.3" + ) + + for d, dependency in enumerate(config["dependencies"]): + dependency_info = {"parameters": [], "returns": []} + skip_endpoint = False + + inputs = dependency["inputs"] + for i in inputs: + for component in config["components"]: + if component["id"] == i: + break + else: + skip_endpoint = True # if component not found, skip endpoint + break + type = component["type"] + if type in client_utils.SKIP_COMPONENTS: + continue + if ( + not component.get("serializer") + and type not in serializing.COMPONENT_MAPPING + ): + skip_endpoint = True # if component not serializable, skip endpoint + break + if type in client_utils.SKIP_COMPONENTS: + continue + label = component["props"].get("label", f"parameter_{i}") + # The config has the most specific API info (taking into account the parameters + # of the component), so we use that if it exists. Otherwise, we fallback to the + # Serializer's API info. 
+ serializer = serializing.COMPONENT_MAPPING[type]() + if component.get("api_info") and after_new_format: + info = component["api_info"] + example = component["example_inputs"]["serialized"] + else: + assert isinstance(serializer, serializing.Serializable) + info = serializer.api_info() + example = serializer.example_inputs()["raw"] + python_info = info["info"] + if serialize and info["serialized_info"]: + python_info = serializer.serialized_info() + if ( + isinstance(serializer, serializing.FileSerializable) + and component["props"].get("file_count", "single") != "single" + ): + python_info = serializer._multiple_file_serialized_info() + + python_type = client_utils.json_schema_to_python_type(python_info) + serializer_name = serializing.COMPONENT_MAPPING[type].__name__ + dependency_info["parameters"].append( + { + "label": label, + "type": info["info"], + "python_type": { + "type": python_type, + "description": python_info.get("description", ""), + }, + "component": type.capitalize(), + "example_input": example, + "serializer": serializer_name, + } + ) + + outputs = dependency["outputs"] + for o in outputs: + for component in config["components"]: + if component["id"] == o: + break + else: + skip_endpoint = True # if component not found, skip endpoint + break + type = component["type"] + if type in client_utils.SKIP_COMPONENTS: + continue + if ( + not component.get("serializer") + and type not in serializing.COMPONENT_MAPPING + ): + skip_endpoint = True # if component not serializable, skip endpoint + break + label = component["props"].get("label", f"value_{o}") + serializer = serializing.COMPONENT_MAPPING[type]() + if component.get("api_info") and after_new_format: + info = component["api_info"] + example = component["example_inputs"]["serialized"] + else: + assert isinstance(serializer, serializing.Serializable) + info = serializer.api_info() + example = serializer.example_inputs()["raw"] + python_info = info["info"] + if serialize and info["serialized_info"]: + 
python_info = serializer.serialized_info() + if ( + isinstance(serializer, serializing.FileSerializable) + and component["props"].get("file_count", "single") != "single" + ): + python_info = serializer._multiple_file_serialized_info() + python_type = client_utils.json_schema_to_python_type(python_info) + serializer_name = serializing.COMPONENT_MAPPING[type].__name__ + dependency_info["returns"].append( + { + "label": label, + "type": info["info"], + "python_type": { + "type": python_type, + "description": python_info.get("description", ""), + }, + "component": type.capitalize(), + "serializer": serializer_name, + } + ) + + if not dependency["backend_fn"]: + skip_endpoint = True + + if skip_endpoint: + continue + if dependency["api_name"] is not None and dependency["api_name"] is not False: + api_info["named_endpoints"][f"/{dependency['api_name']}"] = dependency_info + elif ( + dependency["api_name"] is False + or mode == "interface" + or mode == "tabbed_interface" + ): + pass # Skip unnamed endpoints in interface mode + else: + api_info["unnamed_endpoints"][str(d)] = dependency_info + + return api_info + + +@document("launch", "queue", "integrate", "load") +class Blocks(BlockContext): + """ + Blocks is Gradio's low-level API that allows you to create more custom web + applications and demos than Interfaces (yet still entirely in Python). + + + Compared to the Interface class, Blocks offers more flexibility and control over: + (1) the layout of components (2) the events that + trigger the execution of functions (3) data flows (e.g. inputs can trigger outputs, + which can trigger the next level of outputs). Blocks also offers ways to group + together related demos such as with tabs. + + + The basic usage of Blocks is as follows: create a Blocks object, then use it as a + context (with the "with" statement), and then define layouts, components, or events + within the Blocks context. Finally, call the launch() method to launch the demo. 
    Example:
        import gradio as gr
        def update(name):
            return f"Welcome to Gradio, {name}!"

        with gr.Blocks() as demo:
            gr.Markdown("Start typing below and then click **Run** to see the output.")
            with gr.Row():
                inp = gr.Textbox(placeholder="What is your name?")
                out = gr.Textbox()
            btn = gr.Button("Run")
            btn.click(fn=update, inputs=inp, outputs=out)

        demo.launch()
    Demos: blocks_hello, blocks_flipper, blocks_speech_text_sentiment, generate_english_german
    Guides: blocks-and-event-listeners, controlling-layout, state-in-blocks, custom-CSS-and-JS, custom-interpretations-with-blocks, using-blocks-like-functions
    """

    def __init__(
        self,
        theme: Theme | str | None = None,
        analytics_enabled: bool | None = None,
        mode: str = "blocks",
        title: str = "Gradio",
        css: str | None = None,
        **kwargs,
    ):
        """
        Parameters:
            theme: a Theme object or a string representing a theme. If a string, will look for a built-in theme with that name (e.g. "soft" or "default"), or will attempt to load a theme from the HF Hub (e.g. "gradio/monochrome"). If None, will use the Default theme.
            analytics_enabled: whether to allow basic telemetry. If None, will use GRADIO_ANALYTICS_ENABLED environment variable or default to True.
            mode: a human-friendly name for the kind of Blocks or Interface being created.
            title: The tab title to display when this is opened in a browser window.
            css: custom css or path to custom css file to apply to entire Blocks
        """
        # Thread-capacity limiter for sync handlers; built later by create_limiter().
        self.limiter = None
        # Resolve `theme` to a Theme instance: None -> default, string -> a
        # built-in theme or one fetched from the HF Hub. Any failure falls back
        # to the default theme with a warning instead of raising.
        if theme is None:
            theme = DefaultTheme()
        elif isinstance(theme, str):
            if theme.lower() in BUILT_IN_THEMES:
                theme = BUILT_IN_THEMES[theme.lower()]
            else:
                try:
                    theme = Theme.from_hub(theme)
                except Exception as e:
                    warnings.warn(f"Cannot load {theme}. Caught Exception: {str(e)}")
                    theme = DefaultTheme()
        if not isinstance(theme, Theme):
            warnings.warn("Theme should be a class loaded from gradio.themes")
            theme = DefaultTheme()
        self.theme: Theme = theme
        self.theme_css = theme._get_theme_css()
        self.stylesheets = theme._stylesheets
        self.encrypt = False
        self.share = False
        self.enable_queue = None
        self.max_threads = 40
        # Buffers for streaming outputs, keyed by session hash then run id.
        self.pending_streams = defaultdict(dict)
        self.show_error = True
        # `css` may be either a path to a stylesheet or literal CSS text.
        if css is not None and os.path.exists(css):
            with open(css) as css_file:
                self.css = css_file.read()
        else:
            self.css = css

        # For analytics_enabled and allow_flagging: (1) first check for
        # parameter, (2) check for env variable, (3) default to True/"manual"
        self.analytics_enabled = (
            analytics_enabled
            if analytics_enabled is not None
            else analytics.analytics_enabled()
        )
        if self.analytics_enabled:
            if not wasm_utils.IS_WASM:
                # Version check touches the network, so run it off-thread
                # (skipped entirely under WASM, which has no threading).
                t = threading.Thread(target=analytics.version_check)
                t.start()
        else:
            os.environ["HF_HUB_DISABLE_TELEMETRY"] = "True"
        super().__init__(render=False, **kwargs)
        self.blocks: dict[int, Block] = {}
        self.fns: list[BlockFunction] = []
        self.dependencies = []
        self.mode = mode

        self.is_running = False
        self.local_url = None
        self.share_url = None
        self.width = None
        self.height = None
        self.api_open = True

        self.space_id = utils.get_space()
        self.favicon_path = None
        self.auth = None
        self.dev_mode = bool(os.getenv("GRADIO_WATCH_DIRS", False))
        self.app_id = random.getrandbits(64)
        self.temp_file_sets = []
        self.title = title
        self.show_api = True

        # Only used when an Interface is loaded from a config
        self.predict = None
        self.input_components = None
        self.output_components = None
        self.__name__ = None
        self.api_mode = None
        self.progress_tracking = None
        self.ssl_verify = True

        self.allowed_paths = []
        self.blocked_paths = []
        self.root_path = os.environ.get("GRADIO_ROOT_PATH", "")
        self.root_urls = set()

        if self.analytics_enabled:
            # A theme is "custom" if it matches none of the built-ins.
            is_custom_theme = not any(
                self.theme.to_dict() == built_in_theme.to_dict()
                for built_in_theme in BUILT_IN_THEMES.values()
            )
            data = {
                "mode": self.mode,
                "custom_css": self.css is not None,
                "theme": self.theme.name,
                "is_custom_theme": is_custom_theme,
                "version": get_package_version(),
            }
            analytics.initiated_analytics(data)
    @property
    def _is_running_in_reload_thread(self):
        # True when executing inside the `gradio --watch` reload thread
        # (thread-local flag set by gradio.reload).
        from gradio.reload import reload_thread

        return getattr(reload_thread, "running_reload", False)

    @classmethod
    def from_config(
        cls,
        config: dict,
        fns: list[Callable],
        root_url: str,
    ) -> Blocks:
        """
        Factory method that creates a Blocks from a config and list of functions. Used
        internally by the gradio.external.load() method.

        Parameters:
            config: a dictionary containing the configuration of the Blocks.
            fns: a list of functions that are used in the Blocks. Must be in the same order as the dependencies in the config.
            root_url: an external url to use as a root URL when serving files for components in the Blocks.
        """
        # Deep-copy so mutations below never leak back into the caller's config.
        config = copy.deepcopy(config)
        components_config = config["components"]
        for component_config in components_config:
            # for backwards compatibility, extract style into props
            if "style" in component_config["props"]:
                component_config["props"].update(component_config["props"]["style"])
                del component_config["props"]["style"]
        theme = config.get("theme", "default")
        # Maps config component ids -> the Block instances rebuilt here.
        original_mapping: dict[int, Block] = {}
        root_urls = {root_url}

        def get_block_instance(id: int) -> Block:
            # Instantiate the Block class named in the config entry for `id`.
            for block_config in components_config:
                if block_config["id"] == id:
                    break
            else:
                raise ValueError(f"Cannot find block with id {id}")
            cls = component_or_layout_class(block_config["type"])
            block_config["props"].pop("type", None)
            block_config["props"].pop("name", None)
            block_config["props"].pop("selectable", None)

            # If a Gradio app B is loaded into a Gradio app A, and B itself loads a
            # Gradio app C, then the root_urls of the components in A need to be the
            # URL of C, not B. The else clause below handles this case.
            if block_config["props"].get("root_url") is None:
                block_config["props"]["root_url"] = f"{root_url}/"
            else:
                root_urls.add(block_config["props"]["root_url"])

            # We treat dataset components as a special case because they reference other components
            # in the config. Instead of using the component string names, we use the component ids.
            if (
                block_config["type"] == "dataset"
                and "component_ids" in block_config["props"]
            ):
                block_config["props"].pop("components", None)
                block_config["props"]["components"] = [
                    original_mapping[c] for c in block_config["props"]["component_ids"]
                ]
                block_config["props"].pop("component_ids", None)

            # Any component has already processed its initial value, so we skip that step here
            block = cls(**block_config["props"], _skip_init_processing=True)
            return block

        def iterate_over_children(children_list):
            # Depth-first rebuild of the layout tree, re-entering each
            # container block's context so nesting is reproduced.
            for child_config in children_list:
                id = child_config["id"]
                block = get_block_instance(id)

                original_mapping[id] = block

                children = child_config.get("children")
                if children is not None:
                    assert isinstance(
                        block, BlockContext
                    ), f"Invalid config, Block with id {id} has children but is not a BlockContext."
                    with block:
                        iterate_over_children(children)

        # Fields computed by set_event_trigger; must be stripped before re-adding.
        derived_fields = ["types"]

        with Blocks(theme=theme) as blocks:
            # ID 0 should be the root Blocks component
            original_mapping[0] = Context.root_block or blocks

            iterate_over_children(config["layout"]["children"])

            first_dependency = None

            # add the event triggers
            for dependency, fn in zip(config["dependencies"], fns):
                # We used to add a "fake_event" to the config to cache examples
                # without removing it. This was causing bugs in calling gr.load
                # We fixed the issue by removing "fake_event" from the config in examples.py
                # but we still need to skip these events when loading the config to support
                # older demos
                if "trigger" in dependency and dependency["trigger"] == "fake_event":
                    continue
                for field in derived_fields:
                    dependency.pop(field, None)

                # older versions had a separate trigger field, but now it is part of the
                # targets field
                _targets = dependency.pop("targets")
                trigger = dependency.pop("trigger", None)
                targets = [
                    getattr(
                        original_mapping[
                            target if isinstance(target, int) else target[0]
                        ],
                        trigger if isinstance(target, int) else target[1],
                    )
                    for target in _targets
                ]
                dependency.pop("backend_fn")
                dependency.pop("documentation", None)
                dependency["inputs"] = [
                    original_mapping[i] for i in dependency["inputs"]
                ]
                dependency["outputs"] = [
                    original_mapping[o] for o in dependency["outputs"]
                ]
                dependency.pop("status_tracker", None)
                # Values in the config are already in serialized form, so skip
                # component pre/postprocessing when replaying these events.
                dependency["preprocess"] = False
                dependency["postprocess"] = False

                dependency = blocks.set_event_trigger(
                    targets=targets, fn=fn, **dependency
                )[0]
                if first_dependency is None:
                    first_dependency = dependency

            # Allows some use of Interface-specific methods with loaded Spaces
            if first_dependency and Context.root_block:
                blocks.predict = [fns[0]]
                blocks.input_components = [
                    Context.root_block.blocks[i] for i in first_dependency["inputs"]
                ]
                blocks.output_components = [
                    Context.root_block.blocks[o] for o in first_dependency["outputs"]
                ]
                blocks.__name__ = "Interface"
                blocks.api_mode = True

        blocks.root_urls = root_urls
        return blocks
This was causing bugs in calling gr.load + # We fixed the issue by removing "fake_event" from the config in examples.py + # but we still need to skip these events when loading the config to support + # older demos + if "trigger" in dependency and dependency["trigger"] == "fake_event": + continue + for field in derived_fields: + dependency.pop(field, None) + + # older versions had a separate trigger field, but now it is part of the + # targets field + _targets = dependency.pop("targets") + trigger = dependency.pop("trigger", None) + targets = [ + getattr( + original_mapping[ + target if isinstance(target, int) else target[0] + ], + trigger if isinstance(target, int) else target[1], + ) + for target in _targets + ] + dependency.pop("backend_fn") + dependency.pop("documentation", None) + dependency["inputs"] = [ + original_mapping[i] for i in dependency["inputs"] + ] + dependency["outputs"] = [ + original_mapping[o] for o in dependency["outputs"] + ] + dependency.pop("status_tracker", None) + dependency["preprocess"] = False + dependency["postprocess"] = False + + dependency = blocks.set_event_trigger( + targets=targets, fn=fn, **dependency + )[0] + if first_dependency is None: + first_dependency = dependency + + # Allows some use of Interface-specific methods with loaded Spaces + if first_dependency and Context.root_block: + blocks.predict = [fns[0]] + blocks.input_components = [ + Context.root_block.blocks[i] for i in first_dependency["inputs"] + ] + blocks.output_components = [ + Context.root_block.blocks[o] for o in first_dependency["outputs"] + ] + blocks.__name__ = "Interface" + blocks.api_mode = True + + blocks.root_urls = root_urls + return blocks + + def __str__(self): + return self.__repr__() + + def __repr__(self): + num_backend_fns = len([d for d in self.dependencies if d["backend_fn"]]) + repr = f"Gradio Blocks instance: {num_backend_fns} backend functions" + repr += f"\n{'-' * len(repr)}" + for d, dependency in enumerate(self.dependencies): + if 
    @property
    def expects_oauth(self):
        """Return whether the app expects user to authenticate via OAuth."""
        return any(
            isinstance(block, (components.LoginButton, components.LogoutButton))
            for block in self.blocks.values()
        )

    def set_event_trigger(
        self,
        targets: Sequence[EventListenerMethod],
        fn: Callable | None,
        inputs: Component | list[Component] | set[Component] | None,
        outputs: Component | list[Component] | None,
        preprocess: bool = True,
        postprocess: bool = True,
        scroll_to_output: bool = False,
        show_progress: str = "full",
        api_name: str | None | Literal[False] = None,
        js: str | None = None,
        no_target: bool = False,
        queue: bool | None = None,
        batch: bool = False,
        max_batch_size: int = 4,
        cancels: list[int] | None = None,
        every: float | None = None,
        collects_event_data: bool | None = None,
        trigger_after: int | None = None,
        trigger_only_on_success: bool = False,
    ) -> tuple[dict[str, Any], int]:
        """
        Adds an event to the component's dependencies.
        Parameters:
            event_name: event name
            fn: Callable function
            inputs: input list
            outputs: output list
            preprocess: whether to run the preprocess methods of components
            postprocess: whether to run the postprocess methods of components
            scroll_to_output: whether to scroll to output of dependency on trigger
            show_progress: whether to show progress animation while running.
            api_name: defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the api docs. If set to None, the endpoint will be exposed in the api docs as an unnamed endpoint, although this behavior will be changed in Gradio 4.0. If set to a string, the endpoint will be exposed in the api docs with the given name.
            js: Experimental parameter (API may change): Optional frontend js method to run before running 'fn'. Input arguments for js method are values of 'inputs' and 'outputs', return should be a list of values for output components
            no_target: if True, sets "targets" to [], used for Blocks "load" event
            queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app.
            batch: whether this function takes in a batch of inputs
            max_batch_size: the maximum batch size to send to the function
            cancels: a list of other events to cancel when this event is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another components .click method.
            every: Run this event 'every' number of seconds while the client connection is open. Interpreted in seconds. Queue must be enabled.
            collects_event_data: whether to collect event data for this event
            trigger_after: if set, this event will be triggered after 'trigger_after' function index
            trigger_only_on_success: if True, this event will only be triggered if the previous event was successful (only applies if `trigger_after` is set)
        Returns: dependency information, dependency index
        """
        # Support for singular parameter
        # Resolve each trigger to a (component id, event name) pair; the id is
        # None for app-level events with no target component.
        _targets = [
            (
                target.trigger._id if target.trigger and not no_target else None,
                target.event_name,
            )
            for target in targets
        ]
        # A set of inputs means the fn receives a single dict keyed by
        # component; normalize None/scalar inputs to a list otherwise.
        if isinstance(inputs, set):
            inputs_as_dict = True
            inputs = sorted(inputs, key=lambda x: x._id)
        else:
            inputs_as_dict = False
            if inputs is None:
                inputs = []
            elif not isinstance(inputs, list):
                inputs = [inputs]

        if isinstance(outputs, set):
            outputs = sorted(outputs, key=lambda x: x._id)
        else:
            if outputs is None:
                outputs = []
            elif not isinstance(outputs, list):
                outputs = [outputs]

        if fn is not None and not cancels:
            check_function_inputs_match(fn, inputs, inputs_as_dict)
        if every is not None and every <= 0:
            raise ValueError("Parameter every must be positive or None")
        if every and batch:
            raise ValueError(
                f"Cannot run event in a batch and every {every} seconds. "
                "Either batch is True or every is non-zero but not both."
            )

        # Wrap fn so it re-runs every `every` seconds while connected.
        if every and fn:
            fn = get_continuous_fn(fn, every)
        elif every:
            raise ValueError("Cannot set a value for `every` without a `fn`.")

        _, progress_index, event_data_index = (
            special_args(fn) if fn else (None, None, None)
        )
        self.fns.append(
            BlockFunction(
                fn,
                inputs,
                outputs,
                preprocess,
                postprocess,
                inputs_as_dict,
                progress_index is not None,
            )
        )
        # De-duplicate api_name against already-registered endpoints
        # (e.g. "predict" -> "predict_1"), warning when renamed.
        if api_name is not None and api_name is not False:
            api_name_ = utils.append_unique_suffix(
                api_name, [dep["api_name"] for dep in self.dependencies]
            )
            if api_name != api_name_:
                warnings.warn(f"api_name {api_name} already exists, using {api_name_}")
            api_name = api_name_

        if collects_event_data is None:
            collects_event_data = event_data_index is not None

        dependency = {
            "targets": _targets,
            "inputs": [block._id for block in inputs],
            "outputs": [block._id for block in outputs],
            "backend_fn": fn is not None,
            "js": js,
            "queue": False if fn is None else queue,
            "api_name": api_name,
            "scroll_to_output": False if utils.get_space() else scroll_to_output,
            "show_progress": show_progress,
            "every": every,
            "batch": batch,
            "max_batch_size": max_batch_size,
            "cancels": cancels or [],
            "types": {
                "continuous": bool(every),
                "generator": inspect.isgeneratorfunction(fn) or bool(every),
            },
            "collects_event_data": collects_event_data,
            "trigger_after": trigger_after,
            "trigger_only_on_success": trigger_only_on_success,
        }
        self.dependencies.append(dependency)
        return dependency, len(self.dependencies) - 1
    def render(self):
        # Merge this (pre-built) Blocks into the Blocks currently being
        # rendered: copy components, fns, and dependencies across, rebasing
        # fn indices so cancels/trigger_after still point at the right events.
        if Context.root_block is not None:
            if self._id in Context.root_block.blocks:
                raise DuplicateBlockError(
                    f"A block with id: {self._id} has already been rendered in the current Blocks."
                )
            overlapping_ids = set(Context.root_block.blocks).intersection(self.blocks)
            for id in overlapping_ids:
                # State components are allowed to be reused between Blocks
                if not isinstance(self.blocks[id], components.State):
                    raise DuplicateBlockError(
                        "At least one block in this Blocks has already been rendered."
                    )

            Context.root_block.blocks.update(self.blocks)
            Context.root_block.fns.extend(self.fns)
            dependency_offset = len(Context.root_block.dependencies)
            for i, dependency in enumerate(self.dependencies):
                # De-duplicate api_name against the root's existing endpoints.
                api_name = dependency["api_name"]
                if api_name is not None and api_name is not False:
                    api_name_ = utils.append_unique_suffix(
                        api_name,
                        [dep["api_name"] for dep in Context.root_block.dependencies],
                    )
                    if api_name != api_name_:
                        warnings.warn(
                            f"api_name {api_name} already exists, using {api_name_}"
                        )
                        dependency["api_name"] = api_name_
                # Shift fn-index references into the root's index space.
                dependency["cancels"] = [
                    c + dependency_offset for c in dependency["cancels"]
                ]
                if dependency.get("trigger_after") is not None:
                    dependency["trigger_after"] += dependency_offset
                # Recreate the cancel function so that it has the latest
                # dependency fn indices. This is necessary to properly cancel
                # events in the backend
                if dependency["cancels"]:
                    updated_cancels = [
                        Context.root_block.dependencies[i]
                        for i in dependency["cancels"]
                    ]
                    new_fn = BlockFunction(
                        get_cancel_function(updated_cancels)[0],
                        [],
                        [],
                        False,
                        True,
                        False,
                    )
                    Context.root_block.fns[dependency_offset + i] = new_fn
                Context.root_block.dependencies.append(dependency)
            Context.root_block.temp_file_sets.extend(self.temp_file_sets)
            Context.root_block.root_urls.update(self.root_urls)

        if Context.block is not None:
            Context.block.children.extend(self.children)
        return self
not stateful or a generator).""" + block_fn = self.fns[fn_index] + dependency = self.dependencies[fn_index] + + if inspect.isasyncgenfunction(block_fn.fn): + return False + if inspect.isgeneratorfunction(block_fn.fn): + return False + for input_id in dependency["inputs"]: + block = self.blocks[input_id] + if getattr(block, "stateful", False): + return False + for output_id in dependency["outputs"]: + block = self.blocks[output_id] + if getattr(block, "stateful", False): + return False + + return True + + def __call__(self, *inputs, fn_index: int = 0, api_name: str | None = None): + """ + Allows Blocks objects to be called as functions. Supply the parameters to the + function as positional arguments. To choose which function to call, use the + fn_index parameter, which must be a keyword argument. + + Parameters: + *inputs: the parameters to pass to the function + fn_index: the index of the function to call (defaults to 0, which for Interfaces, is the default prediction function) + api_name: The api_name of the dependency to call. Will take precedence over fn_index. + """ + if api_name is not None: + inferred_fn_index = next( + ( + i + for i, d in enumerate(self.dependencies) + if d.get("api_name") == api_name + ), + None, + ) + if inferred_fn_index is None: + raise InvalidApiNameError( + f"Cannot find a function with api_name {api_name}" + ) + fn_index = inferred_fn_index + if not (self.is_callable(fn_index)): + raise ValueError( + "This function is not callable because it is either stateful or is a generator. Please use the .launch() method instead to create an interactive user interface." 
    def __call__(self, *inputs, fn_index: int = 0, api_name: str | None = None):
        """
        Allows Blocks objects to be called as functions. Supply the parameters to the
        function as positional arguments. To choose which function to call, use the
        fn_index parameter, which must be a keyword argument.

        Parameters:
            *inputs: the parameters to pass to the function
            fn_index: the index of the function to call (defaults to 0, which for Interfaces, is the default prediction function)
            api_name: The api_name of the dependency to call. Will take precedence over fn_index.
        """
        if api_name is not None:
            inferred_fn_index = next(
                (
                    i
                    for i, d in enumerate(self.dependencies)
                    if d.get("api_name") == api_name
                ),
                None,
            )
            if inferred_fn_index is None:
                raise InvalidApiNameError(
                    f"Cannot find a function with api_name {api_name}"
                )
            fn_index = inferred_fn_index
        if not (self.is_callable(fn_index)):
            raise ValueError(
                "This function is not callable because it is either stateful or is a generator. Please use the .launch() method instead to create an interactive user interface."
            )

        inputs = list(inputs)
        processed_inputs = self.serialize_data(fn_index, inputs)
        batch = self.dependencies[fn_index]["batch"]
        # Batched fns expect a list of samples per input; wrap here and
        # unwrap the single-sample result below.
        if batch:
            processed_inputs = [[inp] for inp in processed_inputs]

        # process_api is a coroutine; run it to completion synchronously.
        outputs = client_utils.synchronize_async(
            self.process_api,
            fn_index=fn_index,
            inputs=processed_inputs,
            request=None,
            state={},
        )
        outputs = outputs["data"]

        if batch:
            outputs = [out[0] for out in outputs]

        processed_outputs = self.deserialize_data(fn_index, outputs)
        processed_outputs = utils.resolve_singleton(processed_outputs)

        return processed_outputs

    async def call_function(
        self,
        fn_index: int,
        processed_input: list[Any],
        iterator: AsyncIterator[Any] | None = None,
        requests: routes.Request | list[routes.Request] | None = None,
        event_id: str | None = None,
        event_data: EventData | None = None,
        in_event_listener: bool = False,
    ):
        """
        Calls function with given index and preprocessed input, and measures process time.
        Parameters:
            fn_index: index of function to call
            processed_input: preprocessed input to pass to function
            iterator: iterator to use if function is a generator
            requests: requests to pass to function
            event_id: id of event in queue
            event_data: data associated with event trigger
        """
        block_fn = self.fns[fn_index]
        assert block_fn.fn, f"function with index {fn_index} not defined."
        is_generating = False
        request = requests[0] if isinstance(requests, list) else requests
        start = time.time()

        fn = utils.get_function_with_locals(
            fn=block_fn.fn,
            blocks=self,
            event_id=event_id,
            in_event_listener=in_event_listener,
            request=request,
        )

        if iterator is None:  # If not a generator function that has already run
            if block_fn.inputs_as_dict:
                processed_input = [dict(zip(block_fn.inputs, processed_input))]

            # special_args injects gr.Request/gr.Progress/event-data params.
            processed_input, progress_index, _ = special_args(
                block_fn.fn, processed_input, request, event_data
            )
            progress_tracker = (
                processed_input[progress_index] if progress_index is not None else None
            )

            if progress_tracker is not None and progress_index is not None:
                progress_tracker, fn = create_tracker(
                    self, event_id, fn, progress_tracker.track_tqdm
                )
                processed_input[progress_index] = progress_tracker

            if inspect.iscoroutinefunction(fn):
                prediction = await fn(*processed_input)
            else:
                # Sync fns run in a worker thread, gated by self.limiter.
                prediction = await anyio.to_thread.run_sync(
                    fn, *processed_input, limiter=self.limiter
                )
        else:
            prediction = None

        if inspect.isgeneratorfunction(fn) or inspect.isasyncgenfunction(fn):
            if not self.enable_queue:
                raise ValueError("Need to enable queue to use generators.")
            try:
                if iterator is None:
                    iterator = cast(AsyncIterator[Any], prediction)
                if inspect.isgenerator(iterator):
                    # Wrap sync generators so they can be advanced with await.
                    iterator = utils.SyncToAsyncIterator(iterator, self.limiter)
                prediction = await utils.async_iteration(iterator)
                is_generating = True
            except StopAsyncIteration:
                # Exhausted: return sentinel(s) telling postprocess_data to
                # leave each output unchanged.
                n_outputs = len(self.dependencies[fn_index].get("outputs"))
                prediction = (
                    components._Keywords.FINISHED_ITERATING
                    if n_outputs == 1
                    else (components._Keywords.FINISHED_ITERATING,) * n_outputs
                )
                iterator = None

        duration = time.time() - start

        return {
            "prediction": prediction,
            "duration": duration,
            "is_generating": is_generating,
            "iterator": iterator,
        }
dependency = self.dependencies[fn_index] + processed_input = [] + + for i, input_id in enumerate(dependency["inputs"]): + try: + block = self.blocks[input_id] + except KeyError as e: + raise InvalidBlockError( + f"Input component with id {input_id} used in {dependency['trigger']}() event is not defined in this gr.Blocks context. You are allowed to nest gr.Blocks contexts, but there must be a gr.Blocks context that contains all components and events." + ) from e + assert isinstance( + block, components.IOComponent + ), f"{block.__class__} Component with id {input_id} not a valid input component." + serialized_input = block.serialize(inputs[i]) + processed_input.append(serialized_input) + + return processed_input + + def deserialize_data(self, fn_index: int, outputs: list[Any]) -> list[Any]: + dependency = self.dependencies[fn_index] + predictions = [] + + for o, output_id in enumerate(dependency["outputs"]): + try: + block = self.blocks[output_id] + except KeyError as e: + raise InvalidBlockError( + f"Output component with id {output_id} used in {dependency['trigger']}() event not found in this gr.Blocks context. You are allowed to nest gr.Blocks contexts, but there must be a gr.Blocks context that contains all components and events." + ) from e + assert isinstance( + block, components.IOComponent + ), f"{block.__class__} Component with id {output_id} not a valid output component." + deserialized = block.deserialize( + outputs[o], + save_dir=block.DEFAULT_TEMP_DIR, + root_url=block.root_url, + hf_token=Context.hf_token, + ) + predictions.append(deserialized) + + return predictions + + def validate_inputs(self, fn_index: int, inputs: list[Any]): + block_fn = self.fns[fn_index] + dependency = self.dependencies[fn_index] + + dep_inputs = dependency["inputs"] + + # This handles incorrect inputs when args are changed by a JS function + # Only check not enough args case, ignore extra arguments (for now) + # TODO: make this stricter? 
+ if len(inputs) < len(dep_inputs): + name = ( + f" ({block_fn.name})" + if block_fn.name and block_fn.name != "" + else "" + ) + + wanted_args = [] + received_args = [] + for input_id in dep_inputs: + block = self.blocks[input_id] + wanted_args.append(str(block)) + for inp in inputs: + v = f'"{inp}"' if isinstance(inp, str) else str(inp) + received_args.append(v) + + wanted = ", ".join(wanted_args) + received = ", ".join(received_args) + + # JS func didn't pass enough arguments + raise ValueError( + f"""An event handler{name} didn't receive enough input values (needed: {len(dep_inputs)}, got: {len(inputs)}). +Check if the event handler calls a Javascript function, and make sure its return value is correct. +Wanted inputs: + [{wanted}] +Received inputs: + [{received}]""" + ) + + def preprocess_data( + self, fn_index: int, inputs: list[Any], state: SessionState | None + ): + state = state or SessionState(self) + block_fn = self.fns[fn_index] + dependency = self.dependencies[fn_index] + + self.validate_inputs(fn_index, inputs) + + if block_fn.preprocess: + processed_input = [] + for i, input_id in enumerate(dependency["inputs"]): + try: + block = self.blocks[input_id] + except KeyError as e: + raise InvalidBlockError( + f"Input component with id {input_id} used in {dependency['trigger']}() event not found in this gr.Blocks context. You are allowed to nest gr.Blocks contexts, but there must be a gr.Blocks context that contains all components and events." + ) from e + assert isinstance( + block, components.Component + ), f"{block.__class__} Component with id {input_id} not a valid input component." 
+ if getattr(block, "stateful", False): + processed_input.append(state[input_id]) + else: + if input_id in state: + block = state[input_id] + processed_input.append(block.preprocess(inputs[i])) + else: + processed_input = inputs + return processed_input + + def validate_outputs(self, fn_index: int, predictions: Any | list[Any]): + block_fn = self.fns[fn_index] + dependency = self.dependencies[fn_index] + + dep_outputs = dependency["outputs"] + + if type(predictions) is not list and type(predictions) is not tuple: + predictions = [predictions] + + if len(predictions) < len(dep_outputs): + name = ( + f" ({block_fn.name})" + if block_fn.name and block_fn.name != "" + else "" + ) + + wanted_args = [] + received_args = [] + for output_id in dep_outputs: + block = self.blocks[output_id] + wanted_args.append(str(block)) + for pred in predictions: + v = f'"{pred}"' if isinstance(pred, str) else str(pred) + received_args.append(v) + + wanted = ", ".join(wanted_args) + received = ", ".join(received_args) + + raise ValueError( + f"""An event handler{name} didn't receive enough output values (needed: {len(dep_outputs)}, received: {len(predictions)}). 
    def postprocess_data(
        self, fn_index: int, predictions: list | dict, state: SessionState | None
    ):
        # Turn raw fn return values into frontend-ready payloads: resolve
        # update-dicts, route stateful outputs into session state, and run each
        # component's postprocess() when enabled.
        state = state or SessionState(self)
        block_fn = self.fns[fn_index]
        dependency = self.dependencies[fn_index]
        batch = dependency["batch"]

        # A dict return maps components to values; flatten it into a list
        # ordered like dependency["outputs"].
        if type(predictions) is dict and len(predictions) > 0:
            predictions = convert_component_dict_to_list(
                dependency["outputs"], predictions
            )

        # Single-output fns return a bare value; wrap for uniform handling.
        if len(dependency["outputs"]) == 1 and not (batch):
            predictions = [
                predictions,
            ]

        self.validate_outputs(fn_index, predictions)  # type: ignore

        output = []
        for i, output_id in enumerate(dependency["outputs"]):
            try:
                if predictions[i] is components._Keywords.FINISHED_ITERATING:
                    # Generator exhausted: emit None so the frontend keeps the
                    # last yielded value for this output.
                    output.append(None)
                    continue
            except (IndexError, KeyError) as err:
                raise ValueError(
                    "Number of output components does not match number "
                    f"of values returned from from function {block_fn.name}"
                ) from err

            try:
                block = self.blocks[output_id]
            except KeyError as e:
                raise InvalidBlockError(
                    f"Output component with id {output_id} used in {dependency['trigger']}() event not found in this gr.Blocks context. You are allowed to nest gr.Blocks contexts, but there must be a gr.Blocks context that contains all components and events."
                ) from e

            if getattr(block, "stateful", False):
                # gr.update() values are not meaningful for State components.
                if not utils.is_update(predictions[i]):
                    state[output_id] = predictions[i]
                output.append(None)
            else:
                prediction_value = predictions[i]
                if utils.is_update(
                    prediction_value
                ):  # if update is passed directly (deprecated), remove Nones
                    prediction_value = utils.delete_none(
                        prediction_value, skip_value=True
                    )

                if isinstance(prediction_value, Block):
                    # Returning a component instance is treated as an update
                    # carrying that component's constructor args.
                    prediction_value = prediction_value.constructor_args
                    prediction_value["__type__"] = "update"
                if utils.is_update(prediction_value):
                    # Rebuild the component with the updated args and store it
                    # in session state so later events see the new config.
                    if output_id in state:
                        args = state[output_id].constructor_args.copy()
                    else:
                        args = self.blocks[output_id].constructor_args.copy()
                    args.update(prediction_value)
                    args.pop("value", None)
                    args.pop("__type__")
                    args["render"] = False
                    args["_skip_init_processing"] = not block_fn.postprocess
                    state[output_id] = self.blocks[output_id].__class__(**args)

                    assert isinstance(prediction_value, dict)
                    prediction_value = postprocess_update_dict(
                        block=state[output_id],
                        update_dict=prediction_value,
                        postprocess=block_fn.postprocess,
                    )
                elif block_fn.postprocess:
                    assert isinstance(
                        block, components.Component
                    ), f"{block.__class__} Component with id {output_id} not a valid output component."
                    prediction_value = block.postprocess(prediction_value)
                output.append(prediction_value)

        return output
+ prediction_value = block.postprocess(prediction_value) + output.append(prediction_value) + + return output + + def handle_streaming_outputs( + self, + fn_index: int, + data: list, + session_hash: str | None, + run: int | None, + ) -> list: + if session_hash is None or run is None: + return data + if run not in self.pending_streams[session_hash]: + self.pending_streams[session_hash][run] = {} + stream_run = self.pending_streams[session_hash][run] + + from gradio.events import StreamableOutput + + for i, output_id in enumerate(self.dependencies[fn_index]["outputs"]): + block = self.blocks[output_id] + if isinstance(block, StreamableOutput) and block.streaming: + first_chunk = output_id not in stream_run + binary_data, output_data = block.stream_output( + data[i], f"{session_hash}/{run}/{output_id}", first_chunk + ) + if first_chunk: + stream_run[output_id] = [] + self.pending_streams[session_hash][run][output_id].append(binary_data) + data[i] = output_data + return data + + async def process_api( + self, + fn_index: int, + inputs: list[Any], + state: SessionState | None = None, + request: routes.Request | list[routes.Request] | None = None, + iterators: dict[int, Any] | None = None, + session_hash: str | None = None, + event_id: str | None = None, + event_data: EventData | None = None, + in_event_listener: bool = True, + ) -> dict[str, Any]: + """ + Processes API calls from the frontend. First preprocesses the data, + then runs the relevant function, then postprocesses the output. + Parameters: + fn_index: Index of function to run. + inputs: input data received from the frontend + state: data stored from stateful components for session (key is input block id) + request: the gr.Request object containing information about the network request (e.g. 
IP address, headers, query parameters, username) + iterators: the in-progress iterators for each generator function (key is function index) + event_id: id of event that triggered this API call + event_data: data associated with the event trigger itself + Returns: None + """ + block_fn = self.fns[fn_index] + batch = self.dependencies[fn_index]["batch"] + + if batch: + max_batch_size = self.dependencies[fn_index]["max_batch_size"] + batch_sizes = [len(inp) for inp in inputs] + batch_size = batch_sizes[0] + if inspect.isasyncgenfunction(block_fn.fn) or inspect.isgeneratorfunction( + block_fn.fn + ): + raise ValueError("Gradio does not support generators in batch mode.") + if not all(x == batch_size for x in batch_sizes): + raise ValueError( + f"All inputs to a batch function must have the same length but instead have sizes: {batch_sizes}." + ) + if batch_size > max_batch_size: + raise ValueError( + f"Batch size ({batch_size}) exceeds the max_batch_size for this function ({max_batch_size})" + ) + + inputs = [ + self.preprocess_data(fn_index, list(i), state) for i in zip(*inputs) + ] + result = await self.call_function( + fn_index, + list(zip(*inputs)), + None, + request, + event_id, + event_data, + in_event_listener, + ) + preds = result["prediction"] + data = [ + self.postprocess_data(fn_index, list(o), state) for o in zip(*preds) + ] + data = list(zip(*data)) + is_generating, iterator = None, None + else: + old_iterator = iterators.get(fn_index, None) if iterators else None + if old_iterator: + inputs = [] + else: + inputs = self.preprocess_data(fn_index, inputs, state) + was_generating = old_iterator is not None + result = await self.call_function( + fn_index, + inputs, + old_iterator, + request, + event_id, + event_data, + in_event_listener, + ) + data = self.postprocess_data(fn_index, result["prediction"], state) + is_generating, iterator = result["is_generating"], result["iterator"] + if is_generating or was_generating: + data = self.handle_streaming_outputs( + 
fn_index, + data, + session_hash=session_hash, + run=id(old_iterator) if was_generating else id(iterator), + ) + + block_fn.total_runtime += result["duration"] + block_fn.total_runs += 1 + return { + "data": data, + "is_generating": is_generating, + "iterator": iterator, + "duration": result["duration"], + "average_duration": block_fn.total_runtime / block_fn.total_runs, + } + + def create_limiter(self): + self.limiter = ( + None + if self.max_threads == 40 + else CapacityLimiter(total_tokens=self.max_threads) + ) + + def get_config(self): + return {"type": "column"} + + def get_config_file(self): + config = { + "version": routes.VERSION, + "mode": self.mode, + "app_id": self.app_id, + "dev_mode": self.dev_mode, + "analytics_enabled": self.analytics_enabled, + "components": [], + "css": self.css, + "title": self.title or "Gradio", + "space_id": self.space_id, + "enable_queue": getattr(self, "enable_queue", False), # launch attributes + "show_error": getattr(self, "show_error", False), + "show_api": self.show_api, + "is_colab": utils.colab_check(), + "stylesheets": self.stylesheets, + "theme": self.theme.name, + } + + def get_layout(block): + if not isinstance(block, BlockContext): + return {"id": block._id} + children_layout = [] + for child in block.children: + children_layout.append(get_layout(child)) + return {"id": block._id, "children": children_layout} + + config["layout"] = get_layout(self) + + for _id, block in self.blocks.items(): + props = block.get_config() if hasattr(block, "get_config") else {} + block_config = { + "id": _id, + "type": block.get_block_name(), + "props": utils.delete_none(props), + } + serializer = utils.get_serializer_name(block) + if serializer: + assert isinstance(block, serializing.Serializable) + block_config["serializer"] = serializer + block_config["api_info"] = block.api_info() # type: ignore + block_config["example_inputs"] = block.example_inputs() # type: ignore + config["components"].append(block_config) + 
config["dependencies"] = self.dependencies + return config + + def __enter__(self): + if Context.block is None: + Context.root_block = self + self.parent = Context.block + Context.block = self + self.exited = False + return self + + def __exit__(self, exc_type: type[BaseException] | None = None, *args): + if exc_type is not None: + Context.block = None + Context.root_block = None + return + super().fill_expected_parents() + Context.block = self.parent + # Configure the load events before root_block is reset + self.attach_load_events() + if self.parent is None: + Context.root_block = None + else: + self.parent.children.extend(self.children) + self.config = self.get_config_file() + self.app = routes.App.create_app(self) + self.progress_tracking = any(block_fn.tracks_progress for block_fn in self.fns) + self.exited = True + + def load( + self: Blocks | None = None, + fn: Callable | None = None, + inputs: list[Component] | None = None, + outputs: list[Component] | None = None, + api_name: str | None | Literal[False] = None, + scroll_to_output: bool = False, + show_progress: str = "full", + queue=None, + batch: bool = False, + max_batch_size: int = 4, + preprocess: bool = True, + postprocess: bool = True, + every: float | None = None, + _js: str | None = None, + *, + name: str | None = None, + src: str | None = None, + api_key: str | None = None, + alias: str | None = None, + **kwargs, + ) -> Blocks | dict[str, Any] | None: + """ + For reverse compatibility reasons, this is both a class method and an instance + method, the two of which, confusingly, do two completely different things. + + Class method: loads a demo from a Hugging Face Spaces repo and creates it locally and returns a block instance. Warning: this method will be deprecated. Use the equivalent `gradio.load()` instead. + + Instance method: adds event that runs as soon as the demo loads in the browser. Example usage below. + Parameters: + name: Class Method - the name of the model (e.g. 
"gpt2" or "facebook/bart-base") or space (e.g. "flax-community/spanish-gpt2"), can include the `src` as prefix (e.g. "models/facebook/bart-base") + src: Class Method - the source of the model: `models` or `spaces` (or leave empty if source is provided as a prefix in `name`) + api_key: Class Method - optional access token for loading private Hugging Face Hub models or spaces. Find your token here: https://huggingface.co/settings/tokens. Warning: only provide this if you are loading a trusted private Space as it can be read by the Space you are loading. + alias: Class Method - optional string used as the name of the loaded model instead of the default name (only applies if loading a Space running Gradio 2.x) + fn: Instance Method - the function to wrap an interface around. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component. + inputs: Instance Method - List of gradio.components to use as inputs. If the function takes no inputs, this should be an empty list. + outputs: Instance Method - List of gradio.components to use as inputs. If the function returns no outputs, this should be an empty list. + api_name: Instance Method - Defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the api docs. If set to None, the endpoint will be exposed in the api docs as an unnamed endpoint, although this behavior will be changed in Gradio 4.0. If set to a string, the endpoint will be exposed in the api docs with the given name. 
+ scroll_to_output: Instance Method - If True, will scroll to output component on completion + show_progress: Instance Method - If True, will show progress animation while pending + queue: Instance Method - If True, will place the request on the queue, if the queue exists + batch: Instance Method - If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component. + max_batch_size: Instance Method - Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True) + preprocess: Instance Method - If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component). + postprocess: Instance Method - If False, will not run postprocessing of component data before returning 'fn' output to the browser. + every: Instance Method - Run this event 'every' number of seconds. Interpreted in seconds. Queue must be enabled. + Example: + import gradio as gr + import datetime + with gr.Blocks() as demo: + def get_time(): + return datetime.datetime.now().time() + dt = gr.Textbox(label="Current time") + demo.load(get_time, inputs=None, outputs=dt) + demo.launch() + """ + if self is None: + warn_deprecation( + "gr.Blocks.load() will be deprecated. Use gr.load() instead." 
+ ) + if name is None: + raise ValueError( + "Blocks.load() requires passing parameters as keyword arguments" + ) + return external.load( + name=name, src=src, hf_token=api_key, alias=alias, **kwargs + ) + else: + from gradio.events import Dependency, EventListenerMethod + + if Context.root_block is None: + raise AttributeError( + "Cannot call load() outside of a gradio.Blocks context." + ) + + dep, dep_index = Context.root_block.set_event_trigger( + targets=[EventListenerMethod(self, "load")], + fn=fn, + inputs=inputs, + outputs=outputs, + api_name=api_name, + preprocess=preprocess, + postprocess=postprocess, + scroll_to_output=scroll_to_output, + show_progress=show_progress, + js=_js, + queue=queue, + batch=batch, + max_batch_size=max_batch_size, + every=every, + no_target=True, + ) + return Dependency(dep, dep_index, fn) + + def clear(self): + """Resets the layout of the Blocks object.""" + self.blocks = {} + self.fns = [] + self.dependencies = [] + self.children = [] + return self + + @concurrency_count_warning + @document() + def queue( + self, + concurrency_count: int = 1, + status_update_rate: float | Literal["auto"] = "auto", + client_position_to_load_data: int | None = None, + default_enabled: bool | None = None, + api_open: bool = True, + max_size: int | None = None, + ): + """ + By enabling the queue you can control the rate of processed requests, let users know their position in the queue, and set a limit on maximum number of events allowed. + Parameters: + concurrency_count: Number of worker threads that will be processing requests from the queue concurrently. Increasing this number will increase the rate at which requests are processed, but will also increase the memory usage of the queue. + status_update_rate: If "auto", Queue will send status estimations to all clients whenever a job is finished. Otherwise Queue will send status at regular intervals set by this parameter as the number of seconds. + client_position_to_load_data: DEPRECATED. 
This parameter is deprecated and has no effect. + default_enabled: Deprecated and has no effect. + api_open: If True, the REST routes of the backend will be open, allowing requests made directly to those endpoints to skip the queue. + max_size: The maximum number of events the queue will store at any given moment. If the queue is full, new events will not be added and a user will receive a message saying that the queue is full. If None, the queue size will be unlimited. + Example: (Blocks) + with gr.Blocks() as demo: + button = gr.Button(label="Generate Image") + button.click(fn=image_generator, inputs=gr.Textbox(), outputs=gr.Image()) + demo.queue(max_size=10) + demo.launch() + Example: (Interface) + demo = gr.Interface(image_generator, gr.Textbox(), gr.Image()) + demo.queue(max_size=20) + demo.launch() + """ + if default_enabled is not None: + warn_deprecation( + "The default_enabled parameter of queue has no effect and will be removed " + "in a future version of gradio." + ) + self.enable_queue = True + self.api_open = api_open + if client_position_to_load_data is not None: + warn_deprecation( + "The client_position_to_load_data parameter is deprecated." 
+ ) + if utils.is_zero_gpu_space(): + concurrency_count = self.max_threads + max_size = 1 if max_size is None else max_size + self._queue = queueing.Queue( + live_updates=status_update_rate == "auto", + concurrency_count=concurrency_count, + update_intervals=status_update_rate if status_update_rate != "auto" else 1, + max_size=max_size, + blocks_dependencies=self.dependencies, + ) + self.config = self.get_config_file() + self.app = routes.App.create_app(self) + return self + + def validate_queue_settings(self): + if not self.enable_queue and self.progress_tracking: + raise ValueError("Progress tracking requires queuing to be enabled.") + + for fn_index, dep in enumerate(self.dependencies): + if not self.enable_queue and self.queue_enabled_for_fn(fn_index): + raise ValueError( + f"The queue is enabled for event {dep['api_name'] if dep['api_name'] else fn_index} " + "but the queue has not been enabled for the app. Please call .queue() " + "on your app. Consult https://gradio.app/docs/#blocks-queue for information on how " + "to configure the queue." + ) + for i in dep["cancels"]: + if not self.queue_enabled_for_fn(i): + raise ValueError( + "Queue needs to be enabled! " + "You may get this error by either 1) passing a function that uses the yield keyword " + "into an interface without enabling the queue or 2) defining an event that cancels " + "another event without enabling the queue. 
Both can be solved by calling .queue() " + "before .launch()" + ) + if dep["batch"] and ( + dep["queue"] is False + or (dep["queue"] is None and not self.enable_queue) + ): + raise ValueError("In order to use batching, the queue must be enabled.") + + def launch( + self, + inline: bool | None = None, + inbrowser: bool = False, + share: bool | None = None, + debug: bool = False, + enable_queue: bool | None = None, + max_threads: int = 40, + auth: Callable | tuple[str, str] | list[tuple[str, str]] | None = None, + auth_message: str | None = None, + prevent_thread_lock: bool = False, + show_error: bool = False, + server_name: str | None = None, + server_port: int | None = None, + show_tips: bool = False, + height: int = 500, + width: int | str = "100%", + encrypt: bool | None = None, + favicon_path: str | None = None, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_keyfile_password: str | None = None, + ssl_verify: bool = True, + quiet: bool = False, + show_api: bool = True, + file_directories: list[str] | None = None, + allowed_paths: list[str] | None = None, + blocked_paths: list[str] | None = None, + root_path: str | None = None, + _frontend: bool = True, + app_kwargs: dict[str, Any] | None = None, + state_session_capacity: int = 10000, + ) -> tuple[FastAPI, str, str]: + """ + Launches a simple web server that serves the demo. Can also be used to create a + public link used by anyone to access the demo from their browser by setting share=True. + + Parameters: + inline: whether to display in the interface inline in an iframe. Defaults to True in python notebooks; False otherwise. + inbrowser: whether to automatically launch the interface in a new tab on the default browser. + share: whether to create a publicly shareable link for the interface. Creates an SSH tunnel to make your UI accessible from anywhere. If not provided, it is set to False by default every time, except when running in Google Colab. When localhost is not accessible (e.g. 
Google Colab), setting share=False is not supported. + debug: if True, blocks the main thread from running. If running in Google Colab, this is needed to print the errors in the cell output. + auth: If provided, username and password (or list of username-password tuples) required to access interface. Can also provide function that takes username and password and returns True if valid login. + auth_message: If provided, HTML message provided on login page. + prevent_thread_lock: If True, the interface will block the main thread while the server is running. + show_error: If True, any errors in the interface will be displayed in an alert modal and printed in the browser console log + server_port: will start gradio app on this port (if available). Can be set by environment variable GRADIO_SERVER_PORT. If None, will search for an available port starting at 7860. + server_name: to make app accessible on local network, set this to "0.0.0.0". Can be set by environment variable GRADIO_SERVER_NAME. If None, will use "127.0.0.1". + show_tips: if True, will occasionally show tips about new Gradio features + enable_queue: DEPRECATED (use .queue() method instead.) if True, inference requests will be served through a queue instead of with parallel threads. Required for longer inference times (> 1min) to prevent timeout. The default option in HuggingFace Spaces is True. The default option elsewhere is False. + max_threads: the maximum number of total threads that the Gradio app can generate in parallel. The default is inherited from the starlette library (currently 40). Applies whether the queue is enabled or not. But if queuing is enabled, this parameter is increaseed to be at least the concurrency_count of the queue. + width: The width in pixels of the iframe element containing the interface (used if inline=True) + height: The height in pixels of the iframe element containing the interface (used if inline=True) + encrypt: DEPRECATED. Has no effect. 
+ favicon_path: If a path to a file (.png, .gif, or .ico) is provided, it will be used as the favicon for the web page. + ssl_keyfile: If a path to a file is provided, will use this as the private key file to create a local server running on https. + ssl_certfile: If a path to a file is provided, will use this as the signed certificate for https. Needs to be provided if ssl_keyfile is provided. + ssl_keyfile_password: If a password is provided, will use this with the ssl certificate for https. + ssl_verify: If False, skips certificate validation which allows self-signed certificates to be used. + quiet: If True, suppresses most print statements. + show_api: If True, shows the api docs in the footer of the app. Default True. + file_directories: This parameter has been renamed to `allowed_paths`. It will be removed in a future version. + allowed_paths: List of complete filepaths or parent directories that gradio is allowed to serve (in addition to the directory containing the gradio python file). Must be absolute paths. Warning: if you provide directories, any files in these directories or their subdirectories are accessible to all users of your app. + blocked_paths: List of complete filepaths or parent directories that gradio is not allowed to serve (i.e. users of your app are not allowed to access). Must be absolute paths. Warning: takes precedence over `allowed_paths` and all other directories exposed by Gradio by default. + root_path: The root path (or "mount point") of the application, if it's not served from the root ("/") of the domain. Often used when the application is behind a reverse proxy that forwards requests to the application. For example, if the application is served at "https://example.com/myapp", the `root_path` should be set to "/myapp". Can be set by environment variable GRADIO_ROOT_PATH. Defaults to "". + app_kwargs: Additional keyword arguments to pass to the underlying FastAPI app as a dictionary of parameter keys and argument values. 
For example, `{"docs_url": "/docs"}` + state_session_capacity: The maximum number of sessions whose information to store in memory. If the number of sessions exceeds this number, the oldest sessions will be removed. Reduce capacity to reduce memory usage when using gradio.State or returning updated components from functions. Defaults to 10000. + Returns: + app: FastAPI app object that is running the demo + local_url: Locally accessible link to the demo + share_url: Publicly accessible link to the demo (if share=True, otherwise None) + Example: (Blocks) + import gradio as gr + def reverse(text): + return text[::-1] + with gr.Blocks() as demo: + button = gr.Button(value="Reverse") + button.click(reverse, gr.Textbox(), gr.Textbox()) + demo.launch(share=True, auth=("username", "password")) + Example: (Interface) + import gradio as gr + def reverse(text): + return text[::-1] + demo = gr.Interface(reverse, "text", "text") + demo.launch(share=True, auth=("username", "password")) + """ + if self._is_running_in_reload_thread: + # We have already launched the demo + return None, None, None # type: ignore + + if not self.exited: + self.__exit__() + + if ( + auth + and not callable(auth) + and not isinstance(auth[0], tuple) + and not isinstance(auth[0], list) + ): + self.auth = [auth] + else: + self.auth = auth + self.auth_message = auth_message + self.show_tips = show_tips + self.show_error = show_error + self.height = height + self.width = width + self.favicon_path = favicon_path + self.ssl_verify = ssl_verify + self.state_session_capacity = state_session_capacity + if root_path is None: + self.root_path = os.environ.get("GRADIO_ROOT_PATH", "") + else: + self.root_path = root_path + + if enable_queue is not None: + self.enable_queue = enable_queue + warn_deprecation( + "The `enable_queue` parameter has been deprecated. 
" + "Please use the `.queue()` method instead.", + ) + if encrypt is not None: + warn_deprecation( + "The `encrypt` parameter has been deprecated and has no effect.", + ) + + if self.space_id: + self.enable_queue = self.enable_queue is not False + else: + self.enable_queue = self.enable_queue is True + if self.enable_queue and not hasattr(self, "_queue"): + self.queue() + + self.show_api = show_api + + if file_directories is not None: + warn_deprecation( + "The `file_directories` parameter has been renamed to `allowed_paths`. " + "Please use that instead.", + ) + if allowed_paths is None: + allowed_paths = file_directories + self.allowed_paths = allowed_paths or [] + self.blocked_paths = blocked_paths or [] + + if not isinstance(self.allowed_paths, list): + raise ValueError("`allowed_paths` must be a list of directories.") + if not isinstance(self.blocked_paths, list): + raise ValueError("`blocked_paths` must be a list of directories.") + + self.validate_queue_settings() + + self.config = self.get_config_file() + self.max_threads = max( + self._queue.max_thread_count if self.enable_queue else 0, max_threads + ) + + if self.is_running: + assert isinstance( + self.local_url, str + ), f"Invalid local_url: {self.local_url}" + if not (quiet): + print( + "Rerunning server... use `close()` to stop if you need to change `launch()` parameters.\n----" + ) + else: + if wasm_utils.IS_WASM: + server_name = "xxx" + server_port = 99999 + local_url = "" + server = None + + # In the Wasm environment, we only need the app object + # which the frontend app will directly communicate with through the Worker API, + # and we don't need to start a server. + # So we just create the app object and register it here, + # and avoid using `networking.start_server` that would start a server that don't work in the Wasm env. 
+ from gradio.routes import App + + app = App.create_app(self, app_kwargs=app_kwargs) + wasm_utils.register_app(app) + else: + ( + server_name, + server_port, + local_url, + app, + server, + ) = networking.start_server( + self, + server_name, + server_port, + ssl_keyfile, + ssl_certfile, + ssl_keyfile_password, + app_kwargs=app_kwargs, + ) + self.server_name = server_name + self.local_url = local_url + self.server_port = server_port + self.server_app = ( + self.app + ) = app # server_app is included for backwards compatibility + self.server = server + self.is_running = True + self.is_colab = utils.colab_check() + self.is_kaggle = utils.kaggle_check() + + self.protocol = ( + "https" + if self.local_url.startswith("https") or self.is_colab + else "http" + ) + if not wasm_utils.IS_WASM and not self.is_colab: + print( + strings.en["RUNNING_LOCALLY_SEPARATED"].format( + self.protocol, self.server_name, self.server_port + ) + ) + + if self.enable_queue: + self._queue.set_server_app(self.server_app) + + if not wasm_utils.IS_WASM: + # Cannot run async functions in background other than app's scope. + # Workaround by triggering the app endpoint + requests.get(f"{self.local_url}startup-events", verify=ssl_verify) + else: + pass + # TODO: Call the startup endpoint in the Wasm env too. + + utils.launch_counter() + self.is_sagemaker = utils.sagemaker_check() + if share is None: + if self.is_colab and self.enable_queue: + if not quiet: + print( + "Setting queue=True in a Colab notebook requires sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n" + ) + self.share = True + elif self.is_kaggle: + if not quiet: + print( + "Kaggle notebooks require sharing enabled. Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n" + ) + self.share = True + elif self.is_sagemaker: + if not quiet: + print( + "Sagemaker notebooks may require sharing enabled. 
Setting `share=True` (you can turn this off by setting `share=False` in `launch()` explicitly).\n" + ) + self.share = True + else: + self.share = False + else: + self.share = share + + # If running in a colab or not able to access localhost, + # a shareable link must be created. + if ( + _frontend + and not wasm_utils.IS_WASM + and not networking.url_ok(self.local_url) + and not self.share + ): + raise ValueError( + "When localhost is not accessible, a shareable link must be created. Please set share=True or check your proxy settings to allow access to localhost." + ) + + if self.is_colab: + if not quiet: + if debug: + print(strings.en["COLAB_DEBUG_TRUE"]) + else: + print(strings.en["COLAB_DEBUG_FALSE"]) + if not self.share: + print(strings.en["COLAB_WARNING"].format(self.server_port)) + if self.enable_queue and not self.share: + raise ValueError( + "When using queueing in Colab, a shareable link must be created. Please set share=True." + ) + + if self.share: + if self.space_id: + warnings.warn( + "Setting share=True is not supported on Hugging Face Spaces" + ) + self.share = False + if wasm_utils.IS_WASM: + warnings.warn( + "Setting share=True is not supported in the Wasm environment" + ) + self.share = False + + if self.share: + try: + if self.share_url is None: + self.share_url = networking.setup_tunnel( + self.server_name, self.server_port, self.share_token + ) + print(strings.en["SHARE_LINK_DISPLAY"].format(self.share_url)) + if not (quiet): + print(strings.en["SHARE_LINK_MESSAGE"]) + except (RuntimeError, requests.exceptions.ConnectionError): + if self.analytics_enabled: + analytics.error_analytics("Not able to set up tunnel") + self.share_url = None + self.share = False + if Path(BINARY_PATH).exists(): + print(strings.en["COULD_NOT_GET_SHARE_LINK"]) + else: + print( + strings.en["COULD_NOT_GET_SHARE_LINK_MISSING_FILE"].format( + BINARY_PATH, + BINARY_URL, + BINARY_FILENAME, + BINARY_FOLDER, + ) + ) + else: + if not quiet and not wasm_utils.IS_WASM: + 
print(strings.en["PUBLIC_SHARE_TRUE"]) + self.share_url = None + + if inbrowser and not wasm_utils.IS_WASM: + link = self.share_url if self.share and self.share_url else self.local_url + webbrowser.open(link) + + # Check if running in a Python notebook in which case, display inline + if inline is None: + inline = utils.ipython_check() + if inline: + try: + from IPython.display import HTML, Javascript, display # type: ignore + + if self.share and self.share_url: + while not networking.url_ok(self.share_url): + time.sleep(0.25) + artifact = HTML( + f'
' + ) + + elif self.is_colab: + # modified from /usr/local/lib/python3.7/dist-packages/google/colab/output/_util.py within Colab environment + code = """(async (port, path, width, height, cache, element) => { + if (!google.colab.kernel.accessAllowed && !cache) { + return; + } + element.appendChild(document.createTextNode('')); + const url = await google.colab.kernel.proxyPort(port, {cache}); + + const external_link = document.createElement('div'); + external_link.innerHTML = ` +
+ `; + element.appendChild(external_link); + + const iframe = document.createElement('iframe'); + iframe.src = new URL(path, url).toString(); + iframe.height = height; + iframe.allow = "autoplay; camera; microphone; clipboard-read; clipboard-write;" + iframe.width = width; + iframe.style.border = 0; + element.appendChild(iframe); + })""" + "({port}, {path}, {width}, {height}, {cache}, window.element)".format( + port=json.dumps(self.server_port), + path=json.dumps("/"), + width=json.dumps(self.width), + height=json.dumps(self.height), + cache=json.dumps(False), + ) + + artifact = Javascript(code) + else: + artifact = HTML( + f'
' + ) + self.artifact = artifact + display(artifact) + except ImportError: + pass + + if getattr(self, "analytics_enabled", False): + data = { + "launch_method": "browser" if inbrowser else "inline", + "is_google_colab": self.is_colab, + "is_sharing_on": self.share, + "share_url": self.share_url, + "enable_queue": self.enable_queue, + "show_tips": self.show_tips, + "server_name": server_name, + "server_port": server_port, + "is_space": self.space_id is not None, + "mode": self.mode, + } + analytics.launched_analytics(self, data) + + utils.show_tip(self) + + # Block main thread if debug==True + if debug or int(os.getenv("GRADIO_DEBUG", 0)) == 1 and not wasm_utils.IS_WASM: + self.block_thread() + # Block main thread if running in a script to stop script from exiting + is_in_interactive_mode = bool(getattr(sys, "ps1", sys.flags.interactive)) + + if ( + not prevent_thread_lock + and not is_in_interactive_mode + # In the Wasm env, we don't have to block the main thread because the server won't be shut down after the execution finishes. + # Moreover, we MUST NOT do it because there is only one thread in the Wasm env and blocking it will stop the subsequent code from running. + and not wasm_utils.IS_WASM + ): + self.block_thread() + + return TupleNoPrint((self.server_app, self.local_url, self.share_url)) + + def integrate( + self, + comet_ml=None, + wandb: ModuleType | None = None, + mlflow: ModuleType | None = None, + ) -> None: + """ + A catch-all method for integrating with other libraries. 
This method should be run after launch() + Parameters: + comet_ml: If a comet_ml Experiment object is provided, will integrate with the experiment and appear on Comet dashboard + wandb: If the wandb module is provided, will integrate with it and appear on WandB dashboard + mlflow: If the mlflow module is provided, will integrate with the experiment and appear on ML Flow dashboard + """ + analytics_integration = "" + if comet_ml is not None: + analytics_integration = "CometML" + comet_ml.log_other("Created from", "Gradio") + if self.share_url is not None: + comet_ml.log_text(f"gradio: {self.share_url}") + comet_ml.end() + elif self.local_url: + comet_ml.log_text(f"gradio: {self.local_url}") + comet_ml.end() + else: + raise ValueError("Please run `launch()` first.") + if wandb is not None: + analytics_integration = "WandB" + if self.share_url is not None: + wandb.log( + { + "Gradio panel": wandb.Html( + '' + ) + } + ) + else: + print( + "The WandB integration requires you to " + "`launch(share=True)` first." + ) + if mlflow is not None: + analytics_integration = "MLFlow" + if self.share_url is not None: + mlflow.log_param("Gradio Interface Share Link", self.share_url) + else: + mlflow.log_param("Gradio Interface Local Link", self.local_url) + if self.analytics_enabled and analytics_integration: + data = {"integration": analytics_integration} + analytics.integration_analytics(data) + + def close(self, verbose: bool = True) -> None: + """ + Closes the Interface that was launched and frees the port. 
+ """ + try: + if self.enable_queue: + self._queue.close() + if self.server: + self.server.close() + self.is_running = False + # So that the startup events (starting the queue) + # happen the next time the app is launched + self.app.startup_events_triggered = False + if verbose: + print(f"Closing server running on port: {self.server_port}") + except (AttributeError, OSError): # can't close if not running + pass + + def block_thread( + self, + ) -> None: + """Block main thread until interrupted by user.""" + try: + while True: + time.sleep(0.1) + except (KeyboardInterrupt, OSError): + print("Keyboard interruption in main thread... closing server.") + if self.server: + self.server.close() + for tunnel in CURRENT_TUNNELS: + tunnel.kill() + + def attach_load_events(self): + """Add a load event for every component whose initial value should be randomized.""" + if Context.root_block: + for component in Context.root_block.blocks.values(): + if ( + isinstance(component, components.IOComponent) + and component.load_event_to_attach + ): + load_fn, every = component.load_event_to_attach + # Use set_event_trigger to avoid ambiguity between load class/instance method + from gradio.events import EventListenerMethod + + dep = self.set_event_trigger( + [EventListenerMethod(self, "load")], + load_fn, + None, + component, + no_target=True, + # If every is None, for sure skip the queue + # else, let the enable_queue parameter take precedence + # this will raise a nice error message is every is used + # without queue + queue=False if every is None else None, + every=every, + )[0] + component.load_event = dep + + def startup_events(self): + """Events that should be run when the app containing this block starts up.""" + + if self.enable_queue: + self._queue.start() + # So that processing can resume in case the queue was stopped + self._queue.stopped = False + self.create_limiter() + + def queue_enabled_for_fn(self, fn_index: int): + if self.dependencies[fn_index]["queue"] is None: + 
return self.enable_queue + return self.dependencies[fn_index]["queue"] diff --git a/testbed/gradio-app__gradio/gradio/chat_interface.py b/testbed/gradio-app__gradio/gradio/chat_interface.py new file mode 100644 index 0000000000000000000000000000000000000000..a27d92c92d5c4c9dae7da8cc77207c52ed5a71dc --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/chat_interface.py @@ -0,0 +1,495 @@ +""" +This file defines a useful high-level abstraction to build Gradio chatbots: ChatInterface. +""" + + +from __future__ import annotations + +import inspect +from typing import AsyncGenerator, Callable + +import anyio +from gradio_client import utils as client_utils +from gradio_client.documentation import document, set_documentation_group + +from gradio.blocks import Blocks +from gradio.components import ( + Button, + Chatbot, + IOComponent, + Markdown, + State, + Textbox, + get_component_instance, +) +from gradio.events import Dependency, EventListenerMethod, on +from gradio.helpers import create_examples as Examples # noqa: N812 +from gradio.layouts import Accordion, Column, Group, Row +from gradio.themes import ThemeClass as Theme +from gradio.utils import SyncToAsyncIterator, async_iteration + +set_documentation_group("chatinterface") + + +@document() +class ChatInterface(Blocks): + """ + ChatInterface is Gradio's high-level abstraction for creating chatbot UIs, and allows you to create + a web-based demo around a chatbot model in a few lines of code. Only one parameter is required: fn, which + takes a function that governs the response of the chatbot based on the user input and chat history. Additional + parameters can be used to control the appearance and behavior of the demo. 
+ + Example: + import gradio as gr + + def echo(message, history): + return message + + demo = gr.ChatInterface(fn=echo, examples=["hello", "hola", "merhaba"], title="Echo Bot") + demo.launch() + Demos: chatinterface_random_response, chatinterface_streaming_echo + Guides: creating-a-chatbot-fast, sharing-your-app + """ + + def __init__( + self, + fn: Callable, + *, + chatbot: Chatbot | None = None, + textbox: Textbox | None = None, + additional_inputs: str | IOComponent | list[str | IOComponent] | None = None, + additional_inputs_accordion_name: str = "Additional Inputs", + examples: list[str] | None = None, + cache_examples: bool | None = None, + title: str | None = None, + description: str | None = None, + theme: Theme | str | None = None, + css: str | None = None, + analytics_enabled: bool | None = None, + submit_btn: str | None | Button = "Submit", + stop_btn: str | None | Button = "Stop", + retry_btn: str | None | Button = "🔄 Retry", + undo_btn: str | None | Button = "↩️ Undo", + clear_btn: str | None | Button = "🗑️ Clear", + autofocus: bool = True, + ): + """ + Parameters: + fn: the function to wrap the chat interface around. Should accept two parameters: a string input message and list of two-element lists of the form [[user_message, bot_message], ...] representing the chat history, and return a string response. See the Chatbot documentation for more information on the chat history format. + chatbot: an instance of the gr.Chatbot component to use for the chat interface, if you would like to customize the chatbot properties. If not provided, a default gr.Chatbot component will be created. + textbox: an instance of the gr.Textbox component to use for the chat interface, if you would like to customize the textbox properties. If not provided, a default gr.Textbox component will be created. + additional_inputs: an instance or list of instances of gradio components (or their string shortcuts) to use as additional inputs to the chatbot. 
If components are not already rendered in a surrounding Blocks, then the components will be displayed under the chatbot, in an accordion. + additional_inputs_accordion_name: the label of the accordion to use for additional inputs, only used if additional_inputs is provided. + examples: sample inputs for the function; if provided, appear below the chatbot and can be clicked to populate the chatbot input. + cache_examples: If True, caches examples in the server for fast runtime in examples. The default option in HuggingFace Spaces is True. The default option elsewhere is False. + title: a title for the interface; if provided, appears above chatbot in large font. Also used as the tab title when opened in a browser window. + description: a description for the interface; if provided, appears above the chatbot and beneath the title in regular font. Accepts Markdown and HTML content. + theme: Theme to use, loaded from gradio.themes. + css: custom css or path to custom css file to use with interface. + analytics_enabled: Whether to allow basic telemetry. If None, will use GRADIO_ANALYTICS_ENABLED environment variable if defined, or default to True. + submit_btn: Text to display on the submit button. If None, no button will be displayed. If a Button object, that button will be used. + stop_btn: Text to display on the stop button, which replaces the submit_btn when the submit_btn or retry_btn is clicked and response is streaming. Clicking on the stop_btn will halt the chatbot response. If set to None, stop button functionality does not appear in the chatbot. If a Button object, that button will be used as the stop button. + retry_btn: Text to display on the retry button. If None, no button will be displayed. If a Button object, that button will be used. + undo_btn: Text to display on the delete last button. If None, no button will be displayed. If a Button object, that button will be used. + clear_btn: Text to display on the clear button. 
If None, no button will be displayed. If a Button object, that button will be used. + autofocus: If True, autofocuses to the textbox when the page loads. + """ + super().__init__( + analytics_enabled=analytics_enabled, + mode="chat_interface", + css=css, + title=title or "Gradio", + theme=theme, + ) + self.fn = fn + self.is_async = inspect.iscoroutinefunction( + self.fn + ) or inspect.isasyncgenfunction(self.fn) + self.is_generator = inspect.isgeneratorfunction( + self.fn + ) or inspect.isasyncgenfunction(self.fn) + self.examples = examples + if self.space_id and cache_examples is None: + self.cache_examples = True + else: + self.cache_examples = cache_examples or False + self.buttons: list[Button] = [] + + if additional_inputs: + if not isinstance(additional_inputs, list): + additional_inputs = [additional_inputs] + self.additional_inputs = [ + get_component_instance(i) for i in additional_inputs # type: ignore + ] + else: + self.additional_inputs = [] + self.additional_inputs_accordion_name = additional_inputs_accordion_name + + with self: + if title: + Markdown( + f"

<h1 style='text-align: center; margin-bottom: 1rem'>{self.title}</h1>

" + ) + if description: + Markdown(description) + + with Column(variant="panel"): + if chatbot: + self.chatbot = chatbot.render() + else: + self.chatbot = Chatbot(label="Chatbot") + + with Group(): + with Row(): + if textbox: + textbox.container = False + textbox.show_label = False + self.textbox = textbox.render() + else: + self.textbox = Textbox( + container=False, + show_label=False, + label="Message", + placeholder="Type a message...", + scale=7, + autofocus=autofocus, + ) + if submit_btn: + if isinstance(submit_btn, Button): + submit_btn.render() + elif isinstance(submit_btn, str): + submit_btn = Button( + submit_btn, + variant="primary", + scale=1, + min_width=150, + ) + else: + raise ValueError( + f"The submit_btn parameter must be a gr.Button, string, or None, not {type(submit_btn)}" + ) + if stop_btn: + if isinstance(stop_btn, Button): + stop_btn.visible = False + stop_btn.render() + elif isinstance(stop_btn, str): + stop_btn = Button( + stop_btn, + variant="stop", + visible=False, + scale=1, + min_width=150, + ) + else: + raise ValueError( + f"The stop_btn parameter must be a gr.Button, string, or None, not {type(stop_btn)}" + ) + self.buttons.extend([submit_btn, stop_btn]) + + with Row(): + for btn in [retry_btn, undo_btn, clear_btn]: + if btn: + if isinstance(btn, Button): + btn.render() + elif isinstance(btn, str): + btn = Button(btn, variant="secondary") + else: + raise ValueError( + f"All the _btn parameters must be a gr.Button, string, or None, not {type(btn)}" + ) + self.buttons.append(btn) + + self.fake_api_btn = Button("Fake API", visible=False) + self.fake_response_textbox = Textbox( + label="Response", visible=False + ) + ( + self.submit_btn, + self.stop_btn, + self.retry_btn, + self.undo_btn, + self.clear_btn, + ) = self.buttons + + if examples: + if self.is_generator: + examples_fn = self._examples_stream_fn + else: + examples_fn = self._examples_fn + + self.examples_handler = Examples( + examples=examples, + inputs=[self.textbox] + 
self.additional_inputs, + outputs=self.chatbot, + fn=examples_fn, + ) + + any_unrendered_inputs = any( + not inp.is_rendered for inp in self.additional_inputs + ) + if self.additional_inputs and any_unrendered_inputs: + with Accordion(self.additional_inputs_accordion_name, open=False): + for input_component in self.additional_inputs: + if not input_component.is_rendered: + input_component.render() + + # The example caching must happen after the input components have rendered + if cache_examples: + client_utils.synchronize_async(self.examples_handler.cache) + + self.saved_input = State() + self.chatbot_state = State([]) + + self._setup_events() + self._setup_api() + + def _setup_events(self) -> None: + submit_fn = self._stream_fn if self.is_generator else self._submit_fn + submit_triggers = ( + [self.textbox.submit, self.submit_btn.click] + if self.submit_btn + else [self.textbox.submit] + ) + submit_event = ( + on( + submit_triggers, + self._clear_and_save_textbox, + [self.textbox], + [self.textbox, self.saved_input], + api_name=False, + queue=False, + ) + .then( + self._display_input, + [self.saved_input, self.chatbot_state], + [self.chatbot, self.chatbot_state], + api_name=False, + queue=False, + ) + .then( + submit_fn, + [self.saved_input, self.chatbot_state] + self.additional_inputs, + [self.chatbot, self.chatbot_state], + api_name=False, + ) + ) + self._setup_stop_events(submit_triggers, submit_event) + + if self.retry_btn: + retry_event = ( + self.retry_btn.click( + self._delete_prev_fn, + [self.chatbot_state], + [self.chatbot, self.saved_input, self.chatbot_state], + api_name=False, + queue=False, + ) + .then( + self._display_input, + [self.saved_input, self.chatbot_state], + [self.chatbot, self.chatbot_state], + api_name=False, + queue=False, + ) + .then( + submit_fn, + [self.saved_input, self.chatbot_state] + self.additional_inputs, + [self.chatbot, self.chatbot_state], + api_name=False, + ) + ) + self._setup_stop_events([self.retry_btn.click], 
retry_event) + + if self.undo_btn: + self.undo_btn.click( + self._delete_prev_fn, + [self.chatbot_state], + [self.chatbot, self.saved_input, self.chatbot_state], + api_name=False, + queue=False, + ).then( + lambda x: x, + [self.saved_input], + [self.textbox], + api_name=False, + queue=False, + ) + + if self.clear_btn: + self.clear_btn.click( + lambda: ([], [], None), + None, + [self.chatbot, self.chatbot_state, self.saved_input], + queue=False, + api_name=False, + ) + + def _setup_stop_events( + self, event_triggers: list[EventListenerMethod], event_to_cancel: Dependency + ) -> None: + if self.stop_btn and self.is_generator: + if self.submit_btn: + for event_trigger in event_triggers: + event_trigger( + lambda: ( + Button.update(visible=False), + Button.update(visible=True), + ), + None, + [self.submit_btn, self.stop_btn], + api_name=False, + queue=False, + ) + event_to_cancel.then( + lambda: (Button.update(visible=True), Button.update(visible=False)), + None, + [self.submit_btn, self.stop_btn], + api_name=False, + queue=False, + ) + else: + for event_trigger in event_triggers: + event_trigger( + lambda: Button.update(visible=True), + None, + [self.stop_btn], + api_name=False, + queue=False, + ) + event_to_cancel.then( + lambda: Button.update(visible=False), + None, + [self.stop_btn], + api_name=False, + queue=False, + ) + self.stop_btn.click( + None, + None, + None, + cancels=event_to_cancel, + api_name=False, + ) + + def _setup_api(self) -> None: + api_fn = self._api_stream_fn if self.is_generator else self._api_submit_fn + + self.fake_api_btn.click( + api_fn, + [self.textbox, self.chatbot_state] + self.additional_inputs, + [self.textbox, self.chatbot_state], + api_name="chat", + ) + + def _clear_and_save_textbox(self, message: str) -> tuple[str, str]: + return "", message + + def _display_input( + self, message: str, history: list[list[str | None]] + ) -> tuple[list[list[str | None]], list[list[str | None]]]: + history.append([message, None]) + return history, 
history + + async def _submit_fn( + self, + message: str, + history_with_input: list[list[str | None]], + *args, + ) -> tuple[list[list[str | None]], list[list[str | None]]]: + history = history_with_input[:-1] + if self.is_async: + response = await self.fn(message, history, *args) + else: + response = await anyio.to_thread.run_sync( + self.fn, message, history, *args, limiter=self.limiter + ) + history.append([message, response]) + return history, history + + async def _stream_fn( + self, + message: str, + history_with_input: list[list[str | None]], + *args, + ) -> AsyncGenerator: + history = history_with_input[:-1] + if self.is_async: + generator = self.fn(message, history, *args) + else: + generator = await anyio.to_thread.run_sync( + self.fn, message, history, *args, limiter=self.limiter + ) + generator = SyncToAsyncIterator(generator, self.limiter) + try: + first_response = await async_iteration(generator) + update = history + [[message, first_response]] + yield update, update + except StopIteration: + update = history + [[message, None]] + yield update, update + async for response in generator: + update = history + [[message, response]] + yield update, update + + async def _api_submit_fn( + self, message: str, history: list[list[str | None]], *args + ) -> tuple[str, list[list[str | None]]]: + if self.is_async: + response = await self.fn(message, history, *args) + else: + response = await anyio.to_thread.run_sync( + self.fn, message, history, *args, limiter=self.limiter + ) + history.append([message, response]) + return response, history + + async def _api_stream_fn( + self, message: str, history: list[list[str | None]], *args + ) -> AsyncGenerator: + if self.is_async: + generator = self.fn(message, history, *args) + else: + generator = await anyio.to_thread.run_sync( + self.fn, message, history, *args, limiter=self.limiter + ) + generator = SyncToAsyncIterator(generator, self.limiter) + try: + first_response = await async_iteration(generator) + yield 
first_response, history + [[message, first_response]] + except StopIteration: + yield None, history + [[message, None]] + async for response in generator: + yield response, history + [[message, response]] + + async def _examples_fn(self, message: str, *args) -> list[list[str | None]]: + if self.is_async: + response = await self.fn(message, [], *args) + else: + response = await anyio.to_thread.run_sync( + self.fn, message, [], *args, limiter=self.limiter + ) + return [[message, response]] + + async def _examples_stream_fn( + self, + message: str, + *args, + ) -> AsyncGenerator: + if self.is_async: + generator = self.fn(message, [], *args) + else: + generator = await anyio.to_thread.run_sync( + self.fn, message, [], *args, limiter=self.limiter + ) + generator = SyncToAsyncIterator(generator, self.limiter) + async for response in generator: + yield [[message, response]] + + def _delete_prev_fn( + self, history: list[list[str | None]] + ) -> tuple[list[list[str | None]], str, list[list[str | None]]]: + try: + message, _ = history.pop() + except IndexError: + message = "" + return history, message or "", history diff --git a/testbed/gradio-app__gradio/gradio/cli_env_info.py b/testbed/gradio-app__gradio/gradio/cli_env_info.py new file mode 100644 index 0000000000000000000000000000000000000000..29df6e344144682309ad6811010256c8b10e422a --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/cli_env_info.py @@ -0,0 +1,38 @@ +""" This file is the part of 'gradio/cli.py' for printing the environment info +for the cli command 'gradio environment' +""" +import platform +from importlib import metadata + + +def print_environment_info(): + print("Gradio Environment Information:\n------------------------------") + print("Operating System:", platform.system()) + + for package_name in ["gradio", "gradio_client"]: + try: + package_version = metadata.version(package_name) + print(f"{package_name} version:", package_version) + except metadata.PackageNotFoundError: + print(f"{package_name} 
package is not installed.") + print("\n------------------------------------------------") + for package_name in ["gradio", "gradio_client"]: + try: + dist = metadata.distribution(package_name) + print(f"{package_name} dependencies in your environment:\n") + if dist.requires is not None: + for req in dist.requires: + req_base_name = ( + req.split(">")[0] + .split("<")[0] + .split("~")[0] + .split("[")[0] + .split("!")[0] + ) + try: + print(f"{req_base_name}: {metadata.version(req_base_name)}") + except metadata.PackageNotFoundError: + print(f"{req_base_name} is not installed.") + print("\n") + except metadata.PackageNotFoundError: + print(f"{package_name} package is not installed.") diff --git a/testbed/gradio-app__gradio/gradio/components/__init__.py b/testbed/gradio-app__gradio/gradio/components/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b394643122371557ae59a59a5391361a48172e3c --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/__init__.py @@ -0,0 +1,120 @@ +from gradio.components.annotated_image import AnnotatedImage +from gradio.components.audio import Audio +from gradio.components.bar_plot import BarPlot +from gradio.components.base import ( + Column, + Component, + Form, + FormComponent, + IOComponent, + Row, + _Keywords, + component, + get_component_instance, +) +from gradio.components.button import Button +from gradio.components.carousel import Carousel +from gradio.components.chatbot import Chatbot +from gradio.components.checkbox import Checkbox +from gradio.components.checkboxgroup import CheckboxGroup +from gradio.components.clear_button import ClearButton +from gradio.components.code import Code +from gradio.components.color_picker import ColorPicker +from gradio.components.dataframe import Dataframe +from gradio.components.dataset import Dataset +from gradio.components.dropdown import Dropdown +from gradio.components.duplicate_button import DuplicateButton +from gradio.components.file import File 
+from gradio.components.gallery import Gallery +from gradio.components.highlighted_text import HighlightedText +from gradio.components.html import HTML +from gradio.components.image import Image +from gradio.components.interpretation import Interpretation +from gradio.components.json_component import JSON +from gradio.components.label import Label +from gradio.components.line_plot import LinePlot +from gradio.components.login_button import LoginButton +from gradio.components.logout_button import LogoutButton +from gradio.components.markdown import Markdown +from gradio.components.model3d import Model3D +from gradio.components.number import Number +from gradio.components.plot import Plot +from gradio.components.radio import Radio +from gradio.components.scatter_plot import ScatterPlot +from gradio.components.slider import Slider +from gradio.components.state import State, Variable +from gradio.components.status_tracker import StatusTracker +from gradio.components.textbox import Textbox +from gradio.components.timeseries import Timeseries +from gradio.components.upload_button import UploadButton +from gradio.components.video import Video + +Text = Textbox +DataFrame = Dataframe +Highlightedtext = HighlightedText +Annotatedimage = AnnotatedImage +Highlight = HighlightedText +Checkboxgroup = CheckboxGroup +TimeSeries = Timeseries +Json = JSON + +__all__ = [ + "Audio", + "BarPlot", + "Button", + "Carousel", + "Chatbot", + "ClearButton", + "Component", + "component", + "get_component_instance", + "_Keywords", + "Checkbox", + "CheckboxGroup", + "Code", + "ColorPicker", + "Column", + "Dataframe", + "DataFrame", + "Dataset", + "DuplicateButton", + "Form", + "FormComponent", + "Gallery", + "HTML", + "Image", + "IOComponent", + "Interpretation", + "JSON", + "Json", + "Label", + "LinePlot", + "LoginButton", + "LogoutButton", + "Markdown", + "Textbox", + "Dropdown", + "Model3D", + "File", + "HighlightedText", + "AnnotatedImage", + "CheckboxGroup", + "Timeseries", + "Text", + 
"Highlightedtext", + "Annotatedimage", + "Highlight", + "Checkboxgroup", + "TimeSeries", + "Number", + "Plot", + "Radio", + "Row", + "ScatterPlot", + "Slider", + "State", + "Variable", + "StatusTracker", + "UploadButton", + "Video", +] diff --git a/testbed/gradio-app__gradio/gradio/components/annotated_image.py b/testbed/gradio-app__gradio/gradio/components/annotated_image.py new file mode 100644 index 0000000000000000000000000000000000000000..b3034c17e5f4138155a545fd86a9fed90ad490cb --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/annotated_image.py @@ -0,0 +1,236 @@ +"""gr.AnnotatedImage() component.""" + +from __future__ import annotations + +import warnings +from typing import Literal + +import numpy as np +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import JSONSerializable +from PIL import Image as _Image # using _ to minimize namespace pollution + +from gradio import utils +from gradio.components.base import IOComponent, _Keywords +from gradio.deprecation import warn_style_method_deprecation +from gradio.events import ( + EventListenerMethod, + Selectable, +) + +set_documentation_group("component") + +_Image.init() # fixes https://github.com/gradio-app/gradio/issues/2843 + + +@document() +class AnnotatedImage(Selectable, IOComponent, JSONSerializable): + """ + Displays a base image and colored subsections on top of that image. Subsections can take the from of rectangles (e.g. object detection) or masks (e.g. image segmentation). + Preprocessing: this component does *not* accept input. + Postprocessing: expects a {Tuple[numpy.ndarray | PIL.Image | str, List[Tuple[numpy.ndarray | Tuple[int, int, int, int], str]]]} consisting of a base image and a list of subsections, that are either (x1, y1, x2, y2) tuples identifying object boundaries, or 0-1 confidence masks of the same shape as the image. A label is provided for each subsection. 
+ + Demos: image_segmentation + """ + + def __init__( + self, + value: tuple[ + np.ndarray | _Image.Image | str, + list[tuple[np.ndarray | tuple[int, int, int, int], str]], + ] + | None = None, + *, + show_legend: bool = True, + height: int | None = None, + width: int | None = None, + color_map: dict[str, str] | None = None, + label: str | None = None, + every: float | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + value: Tuple of base image and list of (subsection, label) pairs. + show_legend: If True, will show a legend of the subsections. + height: Height of the displayed image. + width: Width of the displayed image. + color_map: A dictionary mapping labels to colors. The colors must be specified as hex codes. + label: component name in interface. + every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: if True, will display label. + container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + visible: If False, component will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. 
+ elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + self.show_legend = show_legend + self.height = height + self.width = width + self.color_map = color_map + self.select: EventListenerMethod + """ + Event listener for when the user selects Image subsection. + Uses event data gradio.SelectData to carry `value` referring to selected subsection label, and `index` to refer to subsection index. + See EventData documentation on how to use this event data. + """ + IOComponent.__init__( + self, + label=label, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + + @staticmethod + def update( + value: tuple[ + np.ndarray | _Image.Image | str, + list[tuple[np.ndarray | tuple[int, int, int, int], str]], + ] + | Literal[_Keywords.NO_VALUE] = _Keywords.NO_VALUE, + show_legend: bool | None = None, + height: int | None = None, + width: int | None = None, + color_map: dict[str, str] | None = None, + label: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + visible: bool | None = None, + ): + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.AnnotatedImage(...)` instead of `return gr.AnnotatedImage.update(...)`." 
+ ) + updated_config = { + "show_legend": show_legend, + "height": height, + "width": width, + "color_map": color_map, + "label": label, + "show_label": show_label, + "container": container, + "scale": scale, + "min_width": min_width, + "visible": visible, + "value": value, + "__type__": "update", + } + return updated_config + + def postprocess( + self, + y: tuple[ + np.ndarray | _Image.Image | str, + list[tuple[np.ndarray | tuple[int, int, int, int], str]], + ], + ) -> tuple[dict, list[tuple[dict, str]]] | None: + """ + Parameters: + y: Tuple of base image and list of subsections, with each subsection a two-part tuple where the first element is a 4 element bounding box or a 0-1 confidence mask, and the second element is the label. + Returns: + Tuple of base image file and list of subsections, with each subsection a two-part tuple where the first element image path of the mask, and the second element is the label. + """ + if y is None: + return None + base_img = y[0] + if isinstance(base_img, str): + base_img_path = base_img + base_img = np.array(_Image.open(base_img)) + elif isinstance(base_img, np.ndarray): + base_file = self.img_array_to_temp_file(base_img, dir=self.DEFAULT_TEMP_DIR) + base_img_path = str(utils.abspath(base_file)) + elif isinstance(base_img, _Image.Image): + base_file = self.pil_to_temp_file(base_img, dir=self.DEFAULT_TEMP_DIR) + base_img_path = str(utils.abspath(base_file)) + base_img = np.array(base_img) + else: + raise ValueError( + "AnnotatedImage only accepts filepaths, PIL images or numpy arrays for the base image." 
+ ) + self.temp_files.add(base_img_path) + + sections = [] + color_map = self.color_map or {} + + def hex_to_rgb(value): + value = value.lstrip("#") + lv = len(value) + return [int(value[i : i + lv // 3], 16) for i in range(0, lv, lv // 3)] + + for mask, label in y[1]: + mask_array = np.zeros((base_img.shape[0], base_img.shape[1])) + if isinstance(mask, np.ndarray): + mask_array = mask + else: + x1, y1, x2, y2 = mask + border_width = 3 + mask_array[y1:y2, x1:x2] = 0.5 + mask_array[y1:y2, x1 : x1 + border_width] = 1 + mask_array[y1:y2, x2 - border_width : x2] = 1 + mask_array[y1 : y1 + border_width, x1:x2] = 1 + mask_array[y2 - border_width : y2, x1:x2] = 1 + + if label in color_map: + rgb_color = hex_to_rgb(color_map[label]) + else: + rgb_color = [255, 0, 0] + colored_mask = np.zeros((base_img.shape[0], base_img.shape[1], 4)) + solid_mask = np.copy(mask_array) + solid_mask[solid_mask > 0] = 1 + + colored_mask[:, :, 0] = rgb_color[0] * solid_mask + colored_mask[:, :, 1] = rgb_color[1] * solid_mask + colored_mask[:, :, 2] = rgb_color[2] * solid_mask + colored_mask[:, :, 3] = mask_array * 255 + + colored_mask_img = _Image.fromarray((colored_mask).astype(np.uint8)) + + mask_file = self.pil_to_temp_file( + colored_mask_img, dir=self.DEFAULT_TEMP_DIR + ) + mask_file_path = str(utils.abspath(mask_file)) + self.temp_files.add(mask_file_path) + + sections.append( + ({"name": mask_file_path, "data": None, "is_file": True}, label) + ) + + return {"name": base_img_path, "data": None, "is_file": True}, sections + + def style( + self, + *, + height: int | None = None, + width: int | None = None, + color_map: dict[str, str] | None = None, + **kwargs, + ): + """ + This method is deprecated. Please set these arguments in the constructor instead. 
+ """ + warn_style_method_deprecation() + if height is not None: + self.height = height + if width is not None: + self.width = width + if color_map is not None: + self.color_map = color_map + return self diff --git a/testbed/gradio-app__gradio/gradio/components/audio.py b/testbed/gradio-app__gradio/gradio/components/audio.py new file mode 100644 index 0000000000000000000000000000000000000000..22436d1c6209aa7db1838a70688cc41705adc40f --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/audio.py @@ -0,0 +1,395 @@ +"""gr.Audio() component.""" + +from __future__ import annotations + +import tempfile +import warnings +from pathlib import Path +from typing import Any, Callable, Literal + +import numpy as np +import requests +from gradio_client import media_data +from gradio_client import utils as client_utils +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import FileSerializable + +from gradio import processing_utils, utils +from gradio.components.base import IOComponent, _Keywords +from gradio.events import ( + Changeable, + Clearable, + Playable, + Recordable, + Streamable, + StreamableOutput, + Uploadable, +) +from gradio.interpretation import TokenInterpretable + +set_documentation_group("component") + + +@document() +class Audio( + Changeable, + Clearable, + Playable, + Recordable, + Streamable, + StreamableOutput, + Uploadable, + IOComponent, + FileSerializable, + TokenInterpretable, +): + """ + Creates an audio component that can be used to upload/record audio (as an input) or display audio (as an output). + Preprocessing: passes the uploaded audio as a {Tuple(int, numpy.array)} corresponding to (sample rate in Hz, audio data as a 16-bit int array whose values range from -32768 to 32767), or as a {str} filepath, depending on `type`. 
+ Postprocessing: expects a {Tuple(int, numpy.array)} corresponding to (sample rate in Hz, audio data as a float or int numpy array) or as a {str} or {pathlib.Path} filepath or URL to an audio file, or bytes for binary content (recommended for streaming) + Examples-format: a {str} filepath to a local file that contains audio. + Demos: main_note, generate_tone, reverse_audio + Guides: real-time-speech-recognition + """ + + def __init__( + self, + value: str | Path | tuple[int, np.ndarray] | Callable | None = None, + *, + source: Literal["upload", "microphone"] | None = None, + type: Literal["numpy", "filepath"] = "numpy", + label: str | None = None, + every: float | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + interactive: bool | None = None, + visible: bool = True, + streaming: bool = False, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + format: Literal["wav", "mp3"] = "wav", + autoplay: bool = False, + show_download_button=True, + show_share_button: bool | None = None, + show_edit_button: bool | None = True, + **kwargs, + ): + """ + Parameters: + value: A path, URL, or [sample_rate, numpy array] tuple (sample rate in Hz, audio data as a float or int numpy array) for the default value that Audio component is going to take. If callable, the function will be called whenever the app loads to set the initial value of the component. + source: Source of audio. "upload" creates a box where user can drop an audio file, "microphone" creates a microphone input. + type: The format the audio file is converted to before being passed into the prediction function. "numpy" converts the audio to a tuple consisting of: (int sample rate, numpy.array for the data), "filepath" passes a str path to a temporary file containing the audio. + label: component name in interface. 
+ every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: if True, will display label. + container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + interactive: if True, will allow users to upload and edit an audio file; if False, can only be used to play audio. If not provided, this is inferred based on whether the component is used as an input or output. + visible: If False, component will be hidden. + streaming: If set to True when used in a `live` interface as an input, will automatically stream audio from the user's microphone. When set as an output, takes audio chunks yielded from the backend and combines them into one streaming audio output. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + format: The file format to save audio files. Either 'wav' or 'mp3'. wav files are lossless but will tend to be larger files. mp3 files tend to be smaller. Default is wav. Applies both when this component is used as an input (when `type` is "filepath") and when this component is used as an output. + autoplay: Whether to automatically play the audio when the component is used as an output.
Note: browsers will not autoplay audio files if the user has not interacted with the page yet. + show_download_button: If True, will show a download button in the corner of the component for saving audio. If False, icon does not appear. + show_share_button: If True, will show a share icon in the corner of the component that allows user to share outputs to Hugging Face Spaces Discussions. If False, icon does not appear. If set to None (default behavior), then the icon appears if this Gradio app is launched on Spaces, but not otherwise. + show_edit_button: If True, will show an edit icon in the corner of the component that allows user to edit the audio. If False, icon does not appear. Default is True. + """ + valid_sources = ["upload", "microphone"] + source = source if source else ("microphone" if streaming else "upload") + if source not in valid_sources: + raise ValueError( + f"Invalid value for parameter `source`: {source}. Please choose from one of: {valid_sources}" + ) + self.source = source + valid_types = ["numpy", "filepath"] + if type not in valid_types: + raise ValueError( + f"Invalid value for parameter `type`: {type}. Please choose from one of: {valid_types}" + ) + self.type = type + self.streaming = streaming + if streaming and source == "upload": + raise ValueError( + "Audio streaming only available if source is 'microphone'." 
+ ) + self.format = format + self.autoplay = autoplay + self.show_download_button = show_download_button + self.show_share_button = ( + (utils.get_space() is not None) + if show_share_button is None + else show_share_button + ) + self.show_edit_button = show_edit_button + IOComponent.__init__( + self, + label=label, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + interactive=interactive, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + TokenInterpretable.__init__(self) + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": {"is_file": False, "data": media_data.BASE64_AUDIO}, + "serialized": "https://github.com/gradio-app/gradio/raw/main/test/test_files/audio_sample.wav", + } + + @staticmethod + def update( + value: Any | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, + source: Literal["upload", "microphone"] | None = None, + label: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + interactive: bool | None = None, + visible: bool | None = None, + autoplay: bool | None = None, + show_download_button: bool | None = None, + show_share_button: bool | None = None, + show_edit_button: bool | None = None, + ): + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.Audio(...)` instead of `return gr.Audio.update(...)`." 
+ ) + return { + "source": source, + "label": label, + "show_label": show_label, + "container": container, + "scale": scale, + "min_width": min_width, + "interactive": interactive, + "visible": visible, + "value": value, + "autoplay": autoplay, + "show_download_button": show_download_button, + "show_share_button": show_share_button, + "show_edit_button": show_edit_button, + "__type__": "update", + } + + def preprocess( + self, x: dict[str, Any] | None + ) -> tuple[int, np.ndarray] | str | None: + """ + Parameters: + x: dictionary with keys "name", "data", "is_file", "crop_min", "crop_max". + Returns: + audio in requested format + """ + if x is None: + return x + file_name, file_data, is_file = ( + x["name"], + x["data"], + x.get("is_file", False), + ) + crop_min, crop_max = x.get("crop_min", 0), x.get("crop_max", 100) + if is_file: + if client_utils.is_http_url_like(file_name): + temp_file_path = self.download_temp_copy_if_needed(file_name) + else: + temp_file_path = self.make_temp_copy_if_needed(file_name) + else: + temp_file_path = self.base64_to_temp_file_if_needed(file_data, file_name) + + sample_rate, data = processing_utils.audio_from_file( + temp_file_path, crop_min=crop_min, crop_max=crop_max + ) + + # Need a unique name for the file to avoid re-using the same audio file if + # a user submits the same audio file twice, but with different crop min/max. + temp_file_path = Path(temp_file_path) + output_file_name = str( + temp_file_path.with_name( + f"{temp_file_path.stem}-{crop_min}-{crop_max}{temp_file_path.suffix}" + ) + ) + + if self.type == "numpy": + return sample_rate, data + elif self.type == "filepath": + output_file = str(Path(output_file_name).with_suffix(f".{self.format}")) + processing_utils.audio_to_file( + sample_rate, data, output_file, format=self.format + ) + return output_file + else: + raise ValueError( + "Unknown type: " + + str(self.type) + + ". Please choose from: 'numpy', 'filepath'." 
+ ) + + def set_interpret_parameters(self, segments: int = 8): + """ + Calculates interpretation score of audio subsections by splitting the audio into subsections, then using a "leave one out" method to calculate the score of each subsection by removing the subsection and measuring the delta of the output value. + Parameters: + segments: Number of interpretation segments to split audio into. + """ + self.interpretation_segments = segments + return self + + def tokenize(self, x): + if x.get("is_file"): + sample_rate, data = processing_utils.audio_from_file(x["name"]) + else: + file_name = self.base64_to_temp_file_if_needed(x["data"]) + sample_rate, data = processing_utils.audio_from_file(file_name) + leave_one_out_sets = [] + tokens = [] + masks = [] + duration = data.shape[0] + boundaries = np.linspace(0, duration, self.interpretation_segments + 1).tolist() + boundaries = [round(boundary) for boundary in boundaries] + for index in range(len(boundaries) - 1): + start, stop = boundaries[index], boundaries[index + 1] + masks.append((start, stop)) + + # Handle the leave one outs + leave_one_out_data = np.copy(data) + leave_one_out_data[start:stop] = 0 + file = tempfile.NamedTemporaryFile( + delete=False, suffix=".wav", dir=self.DEFAULT_TEMP_DIR + ) + processing_utils.audio_to_file(sample_rate, leave_one_out_data, file.name) + out_data = client_utils.encode_file_to_base64(file.name) + leave_one_out_sets.append(out_data) + file.close() + Path(file.name).unlink() + + # Handle the tokens + token = np.copy(data) + token[0:start] = 0 + token[stop:] = 0 + file = tempfile.NamedTemporaryFile( + delete=False, suffix=".wav", dir=self.DEFAULT_TEMP_DIR + ) + processing_utils.audio_to_file(sample_rate, token, file.name) + token_data = client_utils.encode_file_to_base64(file.name) + file.close() + Path(file.name).unlink() + + tokens.append(token_data) + tokens = [{"name": "token.wav", "data": token} for token in tokens] + leave_one_out_sets = [ + {"name": "loo.wav", "data": loo_set} 
for loo_set in leave_one_out_sets + ] + return tokens, leave_one_out_sets, masks + + def get_masked_inputs(self, tokens, binary_mask_matrix): + # create a "zero input" vector and get sample rate + x = tokens[0]["data"] + file_name = self.base64_to_temp_file_if_needed(x) + sample_rate, data = processing_utils.audio_from_file(file_name) + zero_input = np.zeros_like(data, dtype="int16") + # decode all of the tokens + token_data = [] + for token in tokens: + file_name = self.base64_to_temp_file_if_needed(token["data"]) + _, data = processing_utils.audio_from_file(file_name) + token_data.append(data) + # construct the masked version + masked_inputs = [] + for binary_mask_vector in binary_mask_matrix: + masked_input = np.copy(zero_input) + for t, b in zip(token_data, binary_mask_vector): + masked_input = masked_input + t * int(b) + file = tempfile.NamedTemporaryFile(delete=False, dir=self.DEFAULT_TEMP_DIR) + processing_utils.audio_to_file(sample_rate, masked_input, file.name) + masked_data = client_utils.encode_file_to_base64(file.name) + file.close() + Path(file.name).unlink() + masked_inputs.append(masked_data) + return masked_inputs + + def postprocess( + self, y: tuple[int, np.ndarray] | str | Path | bytes | None + ) -> str | dict | bytes | None: + """ + Parameters: + y: audio data in either of the following formats: a tuple of (sample_rate, data), or a string filepath or URL to an audio file, or None. 
+ Returns: + base64 url data + """ + if y is None: + return None + if isinstance(y, bytes): + if self.streaming: + return y + file_path = self.file_bytes_to_file(y, "audio") + elif isinstance(y, str) and client_utils.is_http_url_like(y): + return {"name": y, "data": None, "is_file": True} + elif isinstance(y, tuple): + sample_rate, data = y + file_path = self.audio_to_temp_file( + data, + sample_rate, + format=self.format, + ) + self.temp_files.add(file_path) + else: + file_path = self.make_temp_copy_if_needed(y) + return { + "name": file_path, + "data": None, + "is_file": True, + "orig_name": Path(file_path).name, + } + + def stream_output(self, y, output_id: str, first_chunk: bool): + output_file = { + "name": output_id, + "is_stream": True, + "is_file": False, + } + if y is None: + return None, output_file + if isinstance(y, bytes): + return y, output_file + if client_utils.is_http_url_like(y["name"]): + response = requests.get(y["name"]) + binary_data = response.content + else: + output_file["orig_name"] = y["orig_name"] + file_path = y["name"] + is_wav = file_path.endswith(".wav") + with open(file_path, "rb") as f: + binary_data = f.read() + if is_wav: + # strip length information from first chunk header, remove headers entirely from subsequent chunks + if first_chunk: + binary_data = ( + binary_data[:4] + b"\xFF\xFF\xFF\xFF" + binary_data[8:] + ) + binary_data = ( + binary_data[:40] + b"\xFF\xFF\xFF\xFF" + binary_data[44:] + ) + else: + binary_data = binary_data[44:] + return binary_data, output_file + + def check_streamable(self): + if self.source != "microphone": + raise ValueError( + "Audio streaming only available if source is 'microphone'." 
+ ) + + def as_example(self, input_data: str | None) -> str: + return Path(input_data).name if input_data else "" diff --git a/testbed/gradio-app__gradio/gradio/components/checkbox.py b/testbed/gradio-app__gradio/gradio/components/checkbox.py new file mode 100644 index 0000000000000000000000000000000000000000..4394bb84f8fc1f49d27998bfa351038ed269b133 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/checkbox.py @@ -0,0 +1,132 @@ +"""gr.Checkbox() component.""" + +from __future__ import annotations + +import warnings +from typing import Callable, Literal + +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import BooleanSerializable + +from gradio.components.base import FormComponent, IOComponent, _Keywords +from gradio.events import Changeable, EventListenerMethod, Inputable, Selectable +from gradio.interpretation import NeighborInterpretable + +set_documentation_group("component") + + +@document() +class Checkbox( + FormComponent, + Changeable, + Inputable, + Selectable, + IOComponent, + BooleanSerializable, + NeighborInterpretable, +): + """ + Creates a checkbox that can be set to `True` or `False`. + + Preprocessing: passes the status of the checkbox as a {bool} into the function. + Postprocessing: expects a {bool} returned from the function and, if it is True, checks the checkbox. + Examples-format: a {bool} representing whether the box is checked. + Demos: sentence_builder, titanic_survival + """ + + def __init__( + self, + value: bool | Callable = False, + *, + label: str | None = None, + info: str | None = None, + every: float | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + value: if True, checked by default. 
If callable, the function will be called whenever the app loads to set the initial value of the component. + label: component name in interface. + info: additional component description. + every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: if True, will display label. + container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + interactive: if True, this checkbox can be checked; if False, checking will be disabled. If not provided, this is inferred based on whether the component is used as an input or output. + visible: If False, component will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + self.select: EventListenerMethod + """ + Event listener for when the user selects or deselects Checkbox. + Uses event data gradio.SelectData to carry `value` referring to label of checkbox, and `selected` to refer to state of checkbox. + See EventData documentation on how to use this event data. 
+ """ + IOComponent.__init__( + self, + label=label, + info=info, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + interactive=interactive, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + NeighborInterpretable.__init__(self) + + @staticmethod + def update( + value: bool | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, + label: str | None = None, + info: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + interactive: bool | None = None, + visible: bool | None = None, + ): + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.Checkbox(...)` instead of `return gr.Checkbox.update(...)`." + ) + return { + "label": label, + "info": info, + "show_label": show_label, + "container": container, + "scale": scale, + "min_width": min_width, + "interactive": interactive, + "visible": visible, + "value": value, + "__type__": "update", + } + + def get_interpretation_neighbors(self, x): + return [not x], {} + + def get_interpretation_scores(self, x, neighbors, scores, **kwargs): + """ + Returns: + The first value represents the interpretation score if the input is False, and the second if the input is True. 
+ """ + if x: + return scores[0], None + else: + return None, scores[0] diff --git a/testbed/gradio-app__gradio/gradio/components/code.py b/testbed/gradio-app__gradio/gradio/components/code.py new file mode 100644 index 0000000000000000000000000000000000000000..2b3bd170caa1203532dbbeaf9185c82d4b0b7722 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/code.py @@ -0,0 +1,153 @@ +"""gr.Code() component""" + +from __future__ import annotations + +import warnings +from typing import Literal + +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import StringSerializable + +from gradio.components.base import IOComponent, _Keywords +from gradio.events import Changeable, Inputable + +set_documentation_group("component") + + +@document() +class Code(Changeable, Inputable, IOComponent, StringSerializable): + """ + Creates a Code editor for entering, editing or viewing code. + Preprocessing: passes a {str} of code into the function. + Postprocessing: expects the function to return a {str} of code or a single-elment {tuple}: (string filepath,) + """ + + languages = [ + "python", + "markdown", + "json", + "html", + "css", + "javascript", + "typescript", + "yaml", + "dockerfile", + "shell", + "r", + None, + ] + + def __init__( + self, + value: str | tuple[str] | None = None, + language: Literal[ + "python", + "markdown", + "json", + "html", + "css", + "javascript", + "typescript", + "yaml", + "dockerfile", + "shell", + "r", + ] + | None = None, + *, + lines: int = 5, + label: str | None = None, + interactive: bool | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + value: Default value to show in the code editor. 
If callable, the function will be called whenever the app loads to set the initial value of the component. + language: The language to display the code as. Supported languages listed in `gr.Code.languages`. + label: component name in interface. + interactive: Whether user should be able to enter code or only view it. + show_label: if True, will display label. + container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + visible: If False, component will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + assert language in Code.languages, f"Language {language} not supported." 
+ self.language = language + self.lines = lines + IOComponent.__init__( + self, + label=label, + interactive=interactive, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + + def postprocess(self, y): + if y is None: + return None + elif isinstance(y, tuple): + with open(y[0]) as file_data: + return file_data.read() + else: + return y.strip() + + @staticmethod + def update( + value: str + | tuple[str] + | None + | Literal[_Keywords.NO_VALUE] = _Keywords.NO_VALUE, + label: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + visible: bool | None = None, + language: Literal[ + "python", + "markdown", + "json", + "html", + "css", + "javascript", + "typescript", + "yaml", + "dockerfile", + "shell", + "r", + ] + | None = None, + interactive: bool | None = None, + ): + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.Code(...)` instead of `return gr.Code.update(...)`." 
+ ) + return { + "label": label, + "show_label": show_label, + "container": container, + "scale": scale, + "min_width": min_width, + "visible": visible, + "value": value, + "language": language, + "interactive": interactive, + "__type__": "update", + } diff --git a/testbed/gradio-app__gradio/gradio/components/color_picker.py b/testbed/gradio-app__gradio/gradio/components/color_picker.py new file mode 100644 index 0000000000000000000000000000000000000000..f8acd134f32fd1c76b20a3664d0fe77bcbb05293 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/color_picker.py @@ -0,0 +1,141 @@ +"""gr.ColorPicker() component.""" + +from __future__ import annotations + +import warnings +from typing import Any, Callable, Literal + +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import StringSerializable + +from gradio.components.base import IOComponent, _Keywords +from gradio.events import ( + Changeable, + Focusable, + Inputable, + Submittable, +) + +set_documentation_group("component") + + +@document() +class ColorPicker( + Changeable, Inputable, Submittable, Focusable, IOComponent, StringSerializable +): + """ + Creates a color picker for user to select a color as string input. + Preprocessing: passes selected color value as a {str} into the function. + Postprocessing: expects a {str} returned from function and sets color picker value to it. + Examples-format: a {str} with a hexadecimal representation of a color, e.g. "#ff0000" for red. 
+ Demos: color_picker, color_generator + """ + + def __init__( + self, + value: str | Callable | None = None, + *, + label: str | None = None, + info: str | None = None, + every: float | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + value: default text to provide in color picker. If callable, the function will be called whenever the app loads to set the initial value of the component. + label: component name in interface. + info: additional component description. + every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: if True, will display label. + container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + interactive: if True, will be rendered as an editable color picker; if False, editing will be disabled. If not provided, this is inferred based on whether the component is used as an input or output. + visible: If False, component will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. 
+ elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + IOComponent.__init__( + self, + label=label, + info=info, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + interactive=interactive, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": "#000000", + "serialized": "#000000", + } + + @staticmethod + def update( + value: str | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, + label: str | None = None, + info: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + visible: bool | None = None, + interactive: bool | None = None, + ): + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.ColorPicker(...)` instead of `return gr.ColorPicker.update(...)`." + ) + return { + "value": value, + "label": label, + "info": info, + "show_label": show_label, + "container": container, + "scale": scale, + "min_width": min_width, + "visible": visible, + "interactive": interactive, + "__type__": "update", + } + + def preprocess(self, x: str | None) -> str | None: + """ + Any preprocessing needed to be performed on function input. + Parameters: + x: text + Returns: + text + """ + if x is None: + return None + else: + return str(x) + + def postprocess(self, y: str | None) -> str | None: + """ + Any postprocessing needed to be performed on function output. 
+ Parameters: + y: text + Returns: + text + """ + if y is None: + return None + else: + return str(y) diff --git a/testbed/gradio-app__gradio/gradio/components/interpretation.py b/testbed/gradio-app__gradio/gradio/components/interpretation.py new file mode 100644 index 0000000000000000000000000000000000000000..b261f4f637d2045f96e53da232faff3325cdc3e7 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/interpretation.py @@ -0,0 +1,55 @@ +"""gr.Interpretation() component""" + +from __future__ import annotations + +from typing import Any, Literal + +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import SimpleSerializable + +from gradio.components.base import Component, _Keywords + +set_documentation_group("component") + + +@document() +class Interpretation(Component, SimpleSerializable): + """ + Used to create an interpretation widget for a component. + Preprocessing: this component does *not* accept input. + Postprocessing: expects a {dict} with keys "original" and "interpretation". + + Guides: custom-interpretations-with-blocks + """ + + def __init__( + self, + component: Component, + *, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + component: Which component to show in the interpretation widget. + visible: Whether or not the interpretation is visible. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. 
+ """ + Component.__init__( + self, visible=visible, elem_id=elem_id, elem_classes=elem_classes, **kwargs + ) + self.component = component + + @staticmethod + def update( + value: Any | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, + visible: bool | None = None, + ): + return { + "visible": visible, + "value": value, + "__type__": "update", + } diff --git a/testbed/gradio-app__gradio/gradio/components/json_component.py b/testbed/gradio-app__gradio/gradio/components/json_component.py new file mode 100644 index 0000000000000000000000000000000000000000..46bef37ba671e4a7df04d196ef63e0a742174662 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/json_component.py @@ -0,0 +1,120 @@ +"""gr.JSON() component.""" + +from __future__ import annotations + +import json +import warnings +from typing import Any, Callable, Literal + +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import JSONSerializable + +from gradio.components.base import IOComponent, _Keywords +from gradio.deprecation import warn_style_method_deprecation +from gradio.events import ( + Changeable, +) + +set_documentation_group("component") + + +@document() +class JSON(Changeable, IOComponent, JSONSerializable): + """ + Used to display arbitrary JSON output prettily. + Preprocessing: this component does *not* accept input. + Postprocessing: expects a {str} filepath to a file containing valid JSON -- or a {list} or {dict} that is valid JSON + + Demos: zip_to_json, blocks_xray + """ + + def __init__( + self, + value: str | dict | list | Callable | None = None, + *, + label: str | None = None, + every: float | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + value: Default value. 
If callable, the function will be called whenever the app loads to set the initial value of the component. + label: component name in interface. + every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: if True, will display label. + container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + visible: If False, component will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + IOComponent.__init__( + self, + label=label, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + + @staticmethod + def update( + value: Any | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, + label: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + visible: bool | None = None, + ): + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. 
`return gr.JSON(...)` instead of `return gr.JSON.update(...)`." + ) + updated_config = { + "label": label, + "show_label": show_label, + "container": container, + "scale": scale, + "min_width": min_width, + "visible": visible, + "value": value, + "__type__": "update", + } + return updated_config + + def postprocess(self, y: dict | list | str | None) -> dict | list | None: + """ + Parameters: + y: either a string filepath to a JSON file, or a Python list or dict that can be converted to JSON + Returns: + JSON output in Python list or dict format + """ + if y is None: + return None + if isinstance(y, str): + return json.loads(y) + else: + return y + + def style(self, *, container: bool | None = None, **kwargs): + """ + This method is deprecated. Please set these arguments in the constructor instead. + """ + warn_style_method_deprecation() + if container is not None: + self.container = container + return self diff --git a/testbed/gradio-app__gradio/gradio/components/label.py b/testbed/gradio-app__gradio/gradio/components/label.py new file mode 100644 index 0000000000000000000000000000000000000000..f6e965b0a3f819ebadcbee490bb926245c96fd81 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/label.py @@ -0,0 +1,177 @@ +"""gr.Label() component.""" + +from __future__ import annotations + +import operator +import warnings +from pathlib import Path +from typing import Callable, Literal + +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import ( + JSONSerializable, +) + +from gradio.components.base import IOComponent, _Keywords +from gradio.deprecation import warn_style_method_deprecation +from gradio.events import ( + Changeable, + EventListenerMethod, + Selectable, +) + +set_documentation_group("component") + + +@document() +class Label(Changeable, Selectable, IOComponent, JSONSerializable): + """ + Displays a classification label, along with confidence scores of top categories, if provided. 
+    Preprocessing: this component does *not* accept input.
+    Postprocessing: expects a {Dict[str, float]} of classes and confidences, or {str} with just the class or an {int}/{float} for regression outputs, or a {str} path to a .json file containing a json dictionary in the structure produced by Label.postprocess().
+
+    Demos: main_note, titanic_survival
+    Guides: image-classification-in-pytorch, image-classification-in-tensorflow, image-classification-with-vision-transformers, building-a-pictionary-app
+    """
+
+    CONFIDENCES_KEY = "confidences"
+
+    def __init__(
+        self,
+        value: dict[str, float] | str | float | Callable | None = None,
+        *,
+        num_top_classes: int | None = None,
+        label: str | None = None,
+        every: float | None = None,
+        show_label: bool | None = None,
+        container: bool = True,
+        scale: int | None = None,
+        min_width: int = 160,
+        visible: bool = True,
+        elem_id: str | None = None,
+        elem_classes: list[str] | str | None = None,
+        color: str | None = None,
+        **kwargs,
+    ):
+        """
+        Parameters:
+            value: Default value to show in the component. If a str or number is provided, simply displays the string or number. If a {Dict[str, float]} of classes and confidences is provided, displays the top class on top and the `num_top_classes` below, along with their confidence bars. If callable, the function will be called whenever the app loads to set the initial value of the component.
+            num_top_classes: number of most confident classes to show.
+            label: component name in interface.
+            every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute.
+            show_label: if True, will display label.
+            container: If True, will place the component in a container - providing some extra padding around the border.
+            scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer.
+            min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first.
+            visible: If False, component will be hidden.
+            elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles.
+            elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles.
+            color: The background color of the label (either a valid css color name or hexadecimal string).
+        """
+        self.num_top_classes = num_top_classes
+        self.color = color
+        self.select: EventListenerMethod
+        """
+        Event listener for when the user selects a category from Label.
+        Uses event data gradio.SelectData to carry `value` referring to name of selected category, and `index` to refer to index.
+        See EventData documentation on how to use this event data.
+        """
+        IOComponent.__init__(
+            self,
+            label=label,
+            every=every,
+            show_label=show_label,
+            container=container,
+            scale=scale,
+            min_width=min_width,
+            visible=visible,
+            elem_id=elem_id,
+            elem_classes=elem_classes,
+            value=value,
+            **kwargs,
+        )
+
+    def postprocess(self, y: dict[str, float] | str | float | None) -> dict | None:
+        """
+        Parameters:
+            y: a dictionary mapping labels to confidence value, or just a string/numerical label by itself
+        Returns:
+            Object with key 'label' representing primary label, and key 'confidences' representing a list of label-confidence pairs
+        """
+        if y is None or y == {}:
+            return {}
+        if isinstance(y, str) and y.endswith(".json") and Path(y).exists():
+            # Already a serialized Label payload on disk; reuse the serializer as-is.
+            return self.serialize(y)
+        if isinstance(y, (str, float, int)):
+            return {"label": str(y)}
+        if isinstance(y, dict):
+            if "confidences" in y and isinstance(y["confidences"], dict):
+                y = y["confidences"]
+                y = {c["label"]: c["confidence"] for c in y}
+            sorted_pred = sorted(y.items(), key=operator.itemgetter(1), reverse=True)
+            if self.num_top_classes is not None:
+                sorted_pred = sorted_pred[: self.num_top_classes]
+            return {
+                "label": sorted_pred[0][0],
+                "confidences": [
+                    {"label": pred[0], "confidence": pred[1]} for pred in sorted_pred
+                ],
+            }
+        raise ValueError(
+            "The `Label` output interface expects one of: a string label, or an int label, a "
+            "float label, or a dictionary whose keys are labels and values are confidences. "
+            f"Instead, got a {type(y)}"
+        )
+
+    @staticmethod
+    def update(
+        value: dict[str, float]
+        | str
+        | float
+        | Literal[_Keywords.NO_VALUE]
+        | None = _Keywords.NO_VALUE,
+        label: str | None = None,
+        show_label: bool | None = None,
+        container: bool | None = None,
+        scale: int | None = None,
+        min_width: int | None = None,
+        visible: bool | None = None,
+        color: str | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE,
+    ):
+        warnings.warn(
+            "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.Label(...)` instead of `return gr.Label.update(...)`."
+        )
+        # If color is not specified (NO_VALUE) map it to None so that
+        # it gets filtered out in postprocess. This will mean the color
+        # will not be updated in the front-end
+        if color is _Keywords.NO_VALUE:
+            color = None
+        # If the color was specified by the developer as None,
+        # map it so that the color is updated to be transparent,
+        # e.g. no background default state.
+        elif color is None:
+            color = "transparent"
+        return {
+            "label": label,
+            "show_label": show_label,
+            "container": container,
+            "scale": scale,
+            "min_width": min_width,
+            "visible": visible,
+            "value": value,
+            "color": color,
+            "__type__": "update",
+        }
+
+    def style(
+        self,
+        *,
+        container: bool | None = None,
+    ):
+        """
+        This method is deprecated. Please set these arguments in the constructor instead.
+        """
+        warn_style_method_deprecation()
+        if container is not None:
+            self.container = container
+        return self
diff --git a/testbed/gradio-app__gradio/gradio/components/login_button.py b/testbed/gradio-app__gradio/gradio/components/login_button.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f47a54a3c3b8a73fa3b24e967872c4125d7ca40
--- /dev/null
+++ b/testbed/gradio-app__gradio/gradio/components/login_button.py
@@ -0,0 +1,95 @@
+"""Predefined button to sign in with Hugging Face in a Gradio Space."""
+from __future__ import annotations
+
+import warnings
+from typing import Any, Literal
+
+from gradio_client.documentation import document, set_documentation_group
+
+from gradio.components import Button
+from gradio.context import Context
+from gradio.routes import Request
+
+set_documentation_group("component")
+
+
+@document()
+class LoginButton(Button):
+    """
+    Button that redirects the user to Sign in with Hugging Face using OAuth.
+ """ + + is_template = True + + def __init__( + self, + *, + value: str = "Sign in with Hugging Face", + variant: Literal["primary", "secondary", "stop"] = "secondary", + size: Literal["sm", "lg"] | None = None, + icon: str + | None = "https://huggingface.co/front/assets/huggingface_logo-noborder.svg", + link: str | None = None, + visible: bool = True, + interactive: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + scale: int | None = 0, + min_width: int | None = None, + **kwargs, + ): + super().__init__( + value, + variant=variant, + size=size, + icon=icon, + link=link, + visible=visible, + interactive=interactive, + elem_id=elem_id, + elem_classes=elem_classes, + scale=scale, + min_width=min_width, + **kwargs, + ) + if Context.root_block is not None: + self.activate() + else: + warnings.warn( + "LoginButton created outside of a Blocks context. May not work unless you call its `activate()` method manually." + ) + + def activate(self): + # Taken from https://cmgdo.com/external-link-in-gradio-button/ + # Taking `self` as input to check if user is logged in + # ('self' value will be either "Sign in with Hugging Face" or "Signed in as ...") + self.click(fn=None, inputs=[self], outputs=None, _js=_js_open_if_not_logged_in) + + self.attach_load_event(self._check_login_status, None) + + def _check_login_status(self, request: Request) -> dict[str, Any]: + # Each time the page is refreshed or loaded, check if the user is logged in and adapt label + session = getattr(request, "session", None) or getattr( + request.request, "session", None + ) + if session is None or "oauth_profile" not in session: + return self.update("Sign in with Hugging Face", interactive=True) + else: + username = session["oauth_profile"]["preferred_username"] + return self.update(f"Signed in as {username}", interactive=False) + + +# JS code to redirects to /login/huggingface if user is not logged in. 
+# If the app is opened in an iframe, open the login page in a new tab. +# Otherwise, redirects locally. Taken from https://stackoverflow.com/a/61596084. +_js_open_if_not_logged_in = """ +(buttonValue) => { + if (!buttonValue.includes("Signed in")) { + if ( window !== window.parent ) { + window.open('/login/huggingface', '_blank'); + } else { + window.location.assign('/login/huggingface'); + } + } +} +""" diff --git a/testbed/gradio-app__gradio/gradio/components/logout_button.py b/testbed/gradio-app__gradio/gradio/components/logout_button.py new file mode 100644 index 0000000000000000000000000000000000000000..b6fd8b9ed22ab095a7c5ab119fed1e6c85a4dc56 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/logout_button.py @@ -0,0 +1,52 @@ +"""Predefined button to sign out from Hugging Face in a Gradio Space.""" +from __future__ import annotations + +from typing import Literal + +from gradio_client.documentation import document, set_documentation_group + +from gradio.components import Button + +set_documentation_group("component") + + +@document() +class LogoutButton(Button): + """ + Button to log out a user from a Space. + """ + + is_template = True + + def __init__( + self, + *, + value: str = "Logout", + variant: Literal["primary", "secondary", "stop"] = "secondary", + size: Literal["sm", "lg"] | None = None, + icon: str + | None = "https://huggingface.co/front/assets/huggingface_logo-noborder.svg", + # Link to logout page (which will delete the session cookie and redirect to landing page). 
+        link: str | None = "/logout",
+        visible: bool = True,
+        interactive: bool = True,
+        elem_id: str | None = None,
+        elem_classes: list[str] | str | None = None,
+        scale: int | None = 0,
+        min_width: int | None = None,
+        **kwargs,
+    ):
+        super().__init__(
+            value,
+            variant=variant,
+            size=size,
+            icon=icon,
+            link=link,
+            visible=visible,
+            interactive=interactive,
+            elem_id=elem_id,
+            elem_classes=elem_classes,
+            scale=scale,
+            min_width=min_width,
+            **kwargs,
+        )
diff --git a/testbed/gradio-app__gradio/gradio/components/markdown.py b/testbed/gradio-app__gradio/gradio/components/markdown.py
new file mode 100644
index 0000000000000000000000000000000000000000..2898ee2379fbec2277caec365be0cf360bbfb557
--- /dev/null
+++ b/testbed/gradio-app__gradio/gradio/components/markdown.py
@@ -0,0 +1,103 @@
+"""gr.Markdown() component."""
+
+from __future__ import annotations
+
+import inspect
+import warnings
+from typing import Any, Callable, Literal
+
+from gradio_client.documentation import document, set_documentation_group
+from gradio_client.serializing import StringSerializable
+
+from gradio.components.base import IOComponent, _Keywords
+from gradio.events import (
+    Changeable,
+)
+
+set_documentation_group("component")
+
+
+@document()
+class Markdown(IOComponent, Changeable, StringSerializable):
+    """
+    Used to render arbitrary Markdown output. Can also render latex enclosed by dollar signs.
+    Preprocessing: this component does *not* accept input.
+    Postprocessing: expects a valid {str} that can be rendered as Markdown.
+
+    Demos: blocks_hello, blocks_kinematics
+    Guides: key-features
+    """
+
+    def __init__(
+        self,
+        value: str | Callable = "",
+        *,
+        rtl: bool = False,
+        latex_delimiters: list[dict[str, str | bool]] | None = None,
+        visible: bool = True,
+        elem_id: str | None = None,
+        elem_classes: list[str] | str | None = None,
+        sanitize_html: bool = True,
+        **kwargs,
+    ):
+        """
+        Parameters:
+            value: Value to show in Markdown component. If callable, the function will be called whenever the app loads to set the initial value of the component.
+            rtl: If True, sets the direction of the rendered text to right-to-left. Default is False, which renders text left-to-right.
+            latex_delimiters: A list of dicts of the form {"left": open delimiter (str), "right": close delimiter (str), "display": whether to display in newline (bool)} that will be used to render LaTeX expressions. If not provided, `latex_delimiters` is set to `[{ "left": "$", "right": "$", "display": False }]`, so only expressions enclosed in $ delimiters will be rendered as LaTeX, and in the same line. Pass in an empty list to disable LaTeX rendering. For more information, see the [KaTeX documentation](https://katex.org/docs/autorender.html).
+            visible: If False, component will be hidden.
+            elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles.
+            elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles.
+            sanitize_html: If False, will disable HTML sanitization when converted from markdown. This is not recommended, as it can lead to security vulnerabilities.
+        """
+        self.rtl = rtl
+        if latex_delimiters is None:
+            # Default: render only inline LaTeX between single dollar signs.
+            latex_delimiters = [{"left": "$", "right": "$", "display": False}]
+        self.latex_delimiters = latex_delimiters
+        self.sanitize_html = sanitize_html
+
+        IOComponent.__init__(
+            self,
+            visible=visible,
+            elem_id=elem_id,
+            elem_classes=elem_classes,
+            value=value,
+            **kwargs,
+        )
+
+    def postprocess(self, y: str | None) -> str | None:
+        """
+        Parameters:
+            y: markdown representation
+        Returns:
+            HTML rendering of markdown
+        """
+        if y is None:
+            return None
+        # cleandoc strips leading indentation so triple-quoted markdown renders correctly.
+        unindented_y = inspect.cleandoc(y)
+        return unindented_y
+
+    @staticmethod
+    def update(
+        value: Any | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE,
+        visible: bool | None = None,
+        rtl: bool | None = None,
+        latex_delimiters: list[dict[str, str | bool]] | None = None,
+        sanitize_html: bool | None = None,
+    ):
+        warnings.warn(
+            "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.Markdown(...)` instead of `return gr.Markdown.update(...)`."
+        )
+        updated_config = {
+            "visible": visible,
+            "value": value,
+            "rtl": rtl,
+            "latex_delimiters": latex_delimiters,
+            "sanitize_html": sanitize_html,
+            "__type__": "update",
+        }
+        return updated_config
+
+    def as_example(self, input_data: str | None) -> str:
+        postprocessed = self.postprocess(input_data)
+        return postprocessed if postprocessed else ""
diff --git a/testbed/gradio-app__gradio/gradio/components/plot.py b/testbed/gradio-app__gradio/gradio/components/plot.py
new file mode 100644
index 0000000000000000000000000000000000000000..763f871c9d7b70bd32717e4edc1247c13641c5f9
--- /dev/null
+++ b/testbed/gradio-app__gradio/gradio/components/plot.py
@@ -0,0 +1,162 @@
+"""gr.Plot() component."""
+
+from __future__ import annotations
+
+import json
+import warnings
+from types import ModuleType
+from typing import Any, Callable, Literal
+
+import altair as alt
+import pandas as pd
+from gradio_client.documentation import document, set_documentation_group
+from gradio_client.serializing import JSONSerializable
+
+from gradio import processing_utils
+from gradio.components.base import IOComponent, _Keywords
+from gradio.deprecation import warn_style_method_deprecation
+from gradio.events import Changeable, Clearable
+
+set_documentation_group("component")
+
+
+@document()
+class Plot(Changeable, Clearable, IOComponent, JSONSerializable):
+    """
+    Used to display various kinds of plots (matplotlib, plotly, or bokeh are supported)
+    Preprocessing: this component does *not* accept input.
+    Postprocessing: expects either a {matplotlib.figure.Figure}, a {plotly.graph_objects._figure.Figure}, or a {dict} corresponding to a bokeh plot (json_item format)
+
+    Demos: altair_plot, outbreak_forecast, blocks_kinematics, stock_forecast, map_airbnb
+    Guides: plot-component-for-maps
+    """
+
+    def __init__(
+        self,
+        value: Callable | None | pd.DataFrame = None,
+        *,
+        label: str | None = None,
+        every: float | None = None,
+        show_label: bool | None = None,
+        container: bool = True,
+        scale: int | None = None,
+        min_width: int = 160,
+        visible: bool = True,
+        elem_id: str | None = None,
+        elem_classes: list[str] | str | None = None,
+        **kwargs,
+    ):
+        """
+        Parameters:
+            value: Optionally, supply a default plot object to display, must be a matplotlib, plotly, altair, or bokeh figure, or a callable. If callable, the function will be called whenever the app loads to set the initial value of the component.
+            label: component name in interface.
+            every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute.
+            show_label: if True, will display label.
+            container: If True, will place the component in a container - providing some extra padding around the border.
+            scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer.
+            min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first.
+            visible: If False, component will be hidden.
+            elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles.
+            elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles.
+        """
+        IOComponent.__init__(
+            self,
+            label=label,
+            every=every,
+            show_label=show_label,
+            container=container,
+            scale=scale,
+            min_width=min_width,
+            visible=visible,
+            elem_id=elem_id,
+            elem_classes=elem_classes,
+            value=value,
+            **kwargs,
+        )
+
+    def get_config(self):
+        # bokeh is an optional dependency; report its version (or None) so the
+        # front-end can load a matching BokehJS.
+        try:
+            import bokeh  # type: ignore
+
+            bokeh_version = bokeh.__version__
+        except ImportError:
+            bokeh_version = None
+
+        config = super().get_config()
+        config["bokeh_version"] = bokeh_version
+        return config
+
+    @staticmethod
+    def update(
+        value: Any | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE,
+        label: str | None = None,
+        show_label: bool | None = None,
+        container: bool | None = None,
+        scale: int | None = None,
+        min_width: int | None = None,
+        visible: bool | None = None,
+    ):
+        warnings.warn(
+            "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.Plot(...)` instead of `return gr.Plot.update(...)`."
+        )
+        updated_config = {
+            "label": label,
+            "show_label": show_label,
+            "container": container,
+            "scale": scale,
+            "min_width": min_width,
+            "visible": visible,
+            "value": value,
+            "__type__": "update",
+        }
+        return updated_config
+
+    def postprocess(self, y) -> dict[str, str] | None:
+        """
+        Parameters:
+            y: plot data
+        Returns:
+            plot type mapped to plot base64 data
+        """
+        import matplotlib.figure
+
+        if y is None:
+            return None
+        if isinstance(y, (ModuleType, matplotlib.figure.Figure)):  # type: ignore
+            # Accepts either a Figure or the pyplot module itself (current figure).
+            dtype = "matplotlib"
+            out_y = processing_utils.encode_plot_to_base64(y)
+        elif "bokeh" in y.__module__:
+            dtype = "bokeh"
+            from bokeh.embed import json_item  # type: ignore
+
+            out_y = json.dumps(json_item(y))
+        else:
+            # Fall back on module name to distinguish altair from plotly; both
+            # expose to_json().
+            is_altair = "altair" in y.__module__
+            dtype = "altair" if is_altair else "plotly"
+            out_y = y.to_json()
+        return {"type": dtype, "plot": out_y}
+
+    def style(self, container: bool | None = None):
+        """
+        This method is deprecated. Please set these arguments in the constructor instead.
+        """
+        warn_style_method_deprecation()
+        if container is not None:
+            self.container = container
+        return self
+
+
+class AltairPlot:
+    @staticmethod
+    def create_legend(position, title):
+        if position == "none":
+            legend = None
+        else:
+            position = {"orient": position} if position else {}
+            legend = {"title": title, **position}
+
+        return legend
+
+    @staticmethod
+    def create_scale(limit):
+        return alt.Scale(domain=limit) if limit else alt.Undefined
diff --git a/testbed/gradio-app__gradio/gradio/components/scatter_plot.py b/testbed/gradio-app__gradio/gradio/components/scatter_plot.py
new file mode 100644
index 0000000000000000000000000000000000000000..2369570eb87e668e602e1cdbd4f0b1e354f909e2
--- /dev/null
+++ b/testbed/gradio-app__gradio/gradio/components/scatter_plot.py
@@ -0,0 +1,498 @@
+"""gr.ScatterPlot() component."""
+
+from __future__ import annotations
+
+import warnings
+from typing import Callable, Literal
+
+import altair as alt
+import pandas as pd
+from gradio_client.documentation import document, set_documentation_group
+from pandas.api.types import is_numeric_dtype
+
+from gradio.components.base import _Keywords
+from gradio.components.plot import AltairPlot, Plot
+
+set_documentation_group("component")
+
+
+@document()
+class ScatterPlot(Plot):
+    """
+    Create a scatter plot.
+
+    Preprocessing: this component does *not* accept input.
+    Postprocessing: expects a pandas dataframe with the data to plot.
+ + Demos: scatter_plot + Guides: creating-a-dashboard-from-bigquery-data + """ + + def __init__( + self, + value: pd.DataFrame | Callable | None = None, + x: str | None = None, + y: str | None = None, + *, + color: str | None = None, + size: str | None = None, + shape: str | None = None, + title: str | None = None, + tooltip: list[str] | str | None = None, + x_title: str | None = None, + y_title: str | None = None, + x_label_angle: float | None = None, + y_label_angle: float | None = None, + color_legend_title: str | None = None, + size_legend_title: str | None = None, + shape_legend_title: str | None = None, + color_legend_position: Literal[ + "left", + "right", + "top", + "bottom", + "top-left", + "top-right", + "bottom-left", + "bottom-right", + "none", + ] + | None = None, + size_legend_position: Literal[ + "left", + "right", + "top", + "bottom", + "top-left", + "top-right", + "bottom-left", + "bottom-right", + "none", + ] + | None = None, + shape_legend_position: Literal[ + "left", + "right", + "top", + "bottom", + "top-left", + "top-right", + "bottom-left", + "bottom-right", + "none", + ] + | None = None, + height: int | None = None, + width: int | None = None, + x_lim: list[int | float] | None = None, + y_lim: list[int | float] | None = None, + caption: str | None = None, + interactive: bool | None = True, + label: str | None = None, + every: float | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + show_actions_button: bool = False, + **kwargs, + ): + """ + Parameters: + value: The pandas dataframe containing the data to display in a scatter plot, or a callable. If callable, the function will be called whenever the app loads to set the initial value of the component. + x: Column corresponding to the x axis. + y: Column corresponding to the y axis. 
+ color: The column to determine the point color. If the column contains numeric data, gradio will interpolate the column data so that small values correspond to light colors and large values correspond to dark values. + size: The column used to determine the point size. Should contain numeric data so that gradio can map the data to the point size. + shape: The column used to determine the point shape. Should contain categorical data. Gradio will map each unique value to a different shape. + title: The title to display on top of the chart. + tooltip: The column (or list of columns) to display on the tooltip when a user hovers a point on the plot. + x_title: The title given to the x-axis. By default, uses the value of the x parameter. + y_title: The title given to the y-axis. By default, uses the value of the y parameter. + x_label_angle: The angle for the x axis labels rotation. Positive values are clockwise, and negative values are counter-clockwise. + y_label_angle: The angle for the y axis labels rotation. Positive values are clockwise, and negative values are counter-clockwise. + color_legend_title: The title given to the color legend. By default, uses the value of color parameter. + size_legend_title: The title given to the size legend. By default, uses the value of the size parameter. + shape_legend_title: The title given to the shape legend. By default, uses the value of the shape parameter. + color_legend_position: The position of the color legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation. + size_legend_position: The position of the size legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation. + shape_legend_position: The position of the shape legend. If the string value 'none' is passed, this legend is omitted. 
For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation. + height: The height of the plot in pixels. + width: The width of the plot in pixels. + x_lim: A tuple or list containing the limits for the x-axis, specified as [x_min, x_max]. + y_lim: A tuple of list containing the limits for the y-axis, specified as [y_min, y_max]. + caption: The (optional) caption to display below the plot. + interactive: Whether users should be able to interact with the plot by panning or zooming with their mouse or trackpad. + label: The (optional) label to display on the top left corner of the plot. + every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: Whether the label should be displayed. + visible: Whether the plot should be visible. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + show_actions_button: Whether to show the actions button on the top right corner of the plot. 
+ """ + self.x = x + self.y = y + self.color = color + self.size = size + self.shape = shape + self.tooltip = tooltip + self.title = title + self.x_title = x_title + self.y_title = y_title + self.x_label_angle = x_label_angle + self.y_label_angle = y_label_angle + self.color_legend_title = color_legend_title + self.color_legend_position = color_legend_position + self.size_legend_title = size_legend_title + self.size_legend_position = size_legend_position + self.shape_legend_title = shape_legend_title + self.shape_legend_position = shape_legend_position + self.caption = caption + self.interactive_chart = interactive + self.width = width + self.height = height + self.x_lim = x_lim + self.y_lim = y_lim + self.show_actions_button = show_actions_button + super().__init__( + value=value, + label=label, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + **kwargs, + ) + + def get_block_name(self) -> str: + return "plot" + + @staticmethod + def update( + value: pd.DataFrame | dict | Literal[_Keywords.NO_VALUE] = _Keywords.NO_VALUE, + x: str | None = None, + y: str | None = None, + color: str | None = None, + size: str | None = None, + shape: str | None = None, + title: str | None = None, + tooltip: list[str] | str | None = None, + x_title: str | None = None, + y_title: str | None = None, + x_label_angle: float | None = None, + y_label_angle: float | None = None, + color_legend_title: str | None = None, + size_legend_title: str | None = None, + shape_legend_title: str | None = None, + color_legend_position: Literal[ + "left", + "right", + "top", + "bottom", + "top-left", + "top-right", + "bottom-left", + "bottom-right", + "none", + ] + | None = None, + size_legend_position: Literal[ + "left", + "right", + "top", + "bottom", + "top-left", + "top-right", + "bottom-left", + "bottom-right", + "none", + ] + | None = None, + shape_legend_position: Literal[ + 
"left", + "right", + "top", + "bottom", + "top-left", + "top-right", + "bottom-left", + "bottom-right", + "none", + ] + | None = None, + height: int | None = None, + width: int | None = None, + x_lim: list[int | float] | None = None, + y_lim: list[int | float] | None = None, + interactive: bool | None = None, + caption: str | None = None, + label: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + visible: bool | None = None, + ): + """Update an existing plot component. + + If updating any of the plot properties (color, size, etc) the value, x, and y parameters must be specified. + + Parameters: + value: The pandas dataframe containing the data to display in a scatter plot. + x: Column corresponding to the x axis. + y: Column corresponding to the y axis. + color: The column to determine the point color. If the column contains numeric data, gradio will interpolate the column data so that small values correspond to light colors and large values correspond to dark values. + size: The column used to determine the point size. Should contain numeric data so that gradio can map the data to the point size. + shape: The column used to determine the point shape. Should contain categorical data. Gradio will map each unique value to a different shape. + title: The title to display on top of the chart. + tooltip: The column (or list of columns) to display on the tooltip when a user hovers a point on the plot. + x_title: The title given to the x axis. By default, uses the value of the x parameter. + y_title: The title given to the y axis. By default, uses the value of the y parameter. + x_label_angle: The angle for the x axis labels rotation. Positive values are clockwise, and negative values are counter-clockwise. + y_label_angle: The angle for the y axis labels rotation. Positive values are clockwise, and negative values are counter-clockwise. 
+            color_legend_title: The title given to the color legend. By default, uses the value of color parameter.
+            size_legend_title: The title given to the size legend. By default, uses the value of the size parameter.
+            shape_legend_title: The title given to the shape legend. By default, uses the value of the shape parameter.
+            color_legend_position: The position of the color legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation.
+            size_legend_position: The position of the size legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation.
+            shape_legend_position: The position of the shape legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation.
+            height: The height of the plot in pixels.
+            width: The width of the plot in pixels.
+            x_lim: A tuple or list containing the limits for the x-axis, specified as [x_min, x_max].
+            y_lim: A tuple or list containing the limits for the y-axis, specified as [y_min, y_max].
+            interactive: Whether users should be able to interact with the plot by panning or zooming with their mouse or trackpad.
+            caption: The (optional) caption to display below the plot.
+            label: The (optional) label to display in the top left corner of the plot.
+            show_label: Whether the label should be displayed.
+            visible: Whether the plot should be visible.
+        """
+        warnings.warn(
+            "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.ScatterPlot(...)` instead of `return gr.ScatterPlot.update(...)`."
+ ) + properties = [ + x, + y, + color, + size, + shape, + title, + tooltip, + x_title, + y_title, + x_label_angle, + y_label_angle, + color_legend_title, + size_legend_title, + shape_legend_title, + color_legend_position, + size_legend_position, + shape_legend_position, + height, + width, + x_lim, + y_lim, + interactive, + ] + if any(properties): + if not isinstance(value, pd.DataFrame): + raise ValueError( + "In order to update plot properties the value parameter " + "must be provided, and it must be a Dataframe. Please pass a value " + "parameter to gr.ScatterPlot.update." + ) + if x is None or y is None: + raise ValueError( + "In order to update plot properties, the x and y axis data " + "must be specified. Please pass valid values for x an y to " + "gr.ScatterPlot.update." + ) + chart = ScatterPlot.create_plot(value, *properties) + value = {"type": "altair", "plot": chart.to_json(), "chart": "scatter"} + + updated_config = { + "label": label, + "show_label": show_label, + "container": container, + "scale": scale, + "min_width": min_width, + "visible": visible, + "value": value, + "caption": caption, + "__type__": "update", + } + return updated_config + + @staticmethod + def create_plot( + value: pd.DataFrame, + x: str, + y: str, + color: str | None = None, + size: str | None = None, + shape: str | None = None, + title: str | None = None, + tooltip: list[str] | str | None = None, + x_title: str | None = None, + y_title: str | None = None, + x_label_angle: float | None = None, + y_label_angle: float | None = None, + color_legend_title: str | None = None, + size_legend_title: str | None = None, + shape_legend_title: str | None = None, + color_legend_position: Literal[ + "left", + "right", + "top", + "bottom", + "top-left", + "top-right", + "bottom-left", + "bottom-right", + "none", + ] + | None = None, + size_legend_position: Literal[ + "left", + "right", + "top", + "bottom", + "top-left", + "top-right", + "bottom-left", + "bottom-right", + "none", + ] + | None = 
None, + shape_legend_position: Literal[ + "left", + "right", + "top", + "bottom", + "top-left", + "top-right", + "bottom-left", + "bottom-right", + "none", + ] + | None = None, + height: int | None = None, + width: int | None = None, + x_lim: list[int | float] | None = None, + y_lim: list[int | float] | None = None, + interactive: bool | None = True, + ): + """Helper for creating the scatter plot.""" + interactive = True if interactive is None else interactive + encodings = { + "x": alt.X( + x, # type: ignore + title=x_title or x, # type: ignore + scale=AltairPlot.create_scale(x_lim), # type: ignore + axis=alt.Axis(labelAngle=x_label_angle) + if x_label_angle is not None + else alt.Axis(), + ), # ignore: type + "y": alt.Y( + y, # type: ignore + title=y_title or y, # type: ignore + scale=AltairPlot.create_scale(y_lim), # type: ignore + axis=alt.Axis(labelAngle=y_label_angle) + if y_label_angle is not None + else alt.Axis(), + ), + } + properties = {} + if title: + properties["title"] = title + if height: + properties["height"] = height + if width: + properties["width"] = width + if color: + if is_numeric_dtype(value[color]): + domain = [value[color].min(), value[color].max()] + range_ = [0, 1] + type_ = "quantitative" + else: + domain = value[color].unique().tolist() + range_ = list(range(len(domain))) + type_ = "nominal" + + encodings["color"] = { + "field": color, + "type": type_, + "legend": AltairPlot.create_legend( + position=color_legend_position, title=color_legend_title or color + ), + "scale": {"domain": domain, "range": range_}, + } + if tooltip: + encodings["tooltip"] = tooltip + if size: + encodings["size"] = { + "field": size, + "type": "quantitative" if is_numeric_dtype(value[size]) else "nominal", + "legend": AltairPlot.create_legend( + position=size_legend_position, title=size_legend_title or size + ), + } + if shape: + encodings["shape"] = { + "field": shape, + "type": "quantitative" if is_numeric_dtype(value[shape]) else "nominal", + "legend": 
AltairPlot.create_legend( + position=shape_legend_position, title=shape_legend_title or shape + ), + } + chart = ( + alt.Chart(value) # type: ignore + .mark_point(clip=True) # type: ignore + .encode(**encodings) + .properties(background="transparent", **properties) + ) + if interactive: + chart = chart.interactive() + + return chart + + def postprocess(self, y: pd.DataFrame | dict | None) -> dict[str, str] | None: + # if None or update + if y is None or isinstance(y, dict): + return y + if self.x is None or self.y is None: + raise ValueError("No value provided for required parameters `x` and `y`.") + chart = self.create_plot( + value=y, + x=self.x, + y=self.y, + color=self.color, + size=self.size, + shape=self.shape, + title=self.title, + tooltip=self.tooltip, + x_title=self.x_title, + y_title=self.y_title, + x_label_angle=self.x_label_angle, + y_label_angle=self.y_label_angle, + color_legend_title=self.color_legend_title, + size_legend_title=self.size_legend_title, + shape_legend_title=self.size_legend_title, + color_legend_position=self.color_legend_position, # type: ignore + size_legend_position=self.size_legend_position, # type: ignore + shape_legend_position=self.shape_legend_position, # type: ignore + interactive=self.interactive_chart, + height=self.height, + width=self.width, + x_lim=self.x_lim, + y_lim=self.y_lim, + ) + + return {"type": "altair", "plot": chart.to_json(), "chart": "scatter"} diff --git a/testbed/gradio-app__gradio/gradio/components/slider.py b/testbed/gradio-app__gradio/gradio/components/slider.py new file mode 100644 index 0000000000000000000000000000000000000000..514fec178b6a57f70704bf141b83ade64fa91970 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/slider.py @@ -0,0 +1,205 @@ +"""gr.Slider() component.""" + +from __future__ import annotations + +import math +import random +import warnings +from typing import Any, Callable, Literal + +import numpy as np +from gradio_client.documentation import document, 
set_documentation_group +from gradio_client.serializing import NumberSerializable + +from gradio.components.base import FormComponent, IOComponent, _Keywords +from gradio.deprecation import warn_style_method_deprecation +from gradio.events import Changeable, Inputable, Releaseable +from gradio.interpretation import NeighborInterpretable + +set_documentation_group("component") + + +@document() +class Slider( + FormComponent, + Changeable, + Inputable, + Releaseable, + IOComponent, + NumberSerializable, + NeighborInterpretable, +): + """ + Creates a slider that ranges from `minimum` to `maximum` with a step size of `step`. + Preprocessing: passes slider value as a {float} into the function. + Postprocessing: expects an {int} or {float} returned from function and sets slider value to it as long as it is within range. + Examples-format: A {float} or {int} representing the slider's value. + + Demos: sentence_builder, slider_release, generate_tone, titanic_survival, interface_random_slider, blocks_random_slider + Guides: create-your-own-friends-with-a-gan + """ + + def __init__( + self, + minimum: float = 0, + maximum: float = 100, + value: float | Callable | None = None, + *, + step: float | None = None, + label: str | None = None, + info: str | None = None, + every: float | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + randomize: bool = False, + **kwargs, + ): + """ + Parameters: + minimum: minimum value for slider. + maximum: maximum value for slider. + value: default value. If callable, the function will be called whenever the app loads to set the initial value of the component. Ignored if randomized=True. + step: increment between slider values. + label: component name in interface. + info: additional component description. 
+ every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: if True, will display label. + container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + interactive: if True, slider will be adjustable; if False, adjusting will be disabled. If not provided, this is inferred based on whether the component is used as an input or output. + visible: If False, component will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + randomize: If True, the value of the slider when the app loads is taken uniformly at random from the range given by the minimum and maximum. 
+ """ + self.minimum = minimum + self.maximum = maximum + if step is None: + difference = maximum - minimum + power = math.floor(math.log10(difference) - 2) + self.step = 10**power + else: + self.step = step + if randomize: + value = self.get_random_value + IOComponent.__init__( + self, + label=label, + info=info, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + interactive=interactive, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + NeighborInterpretable.__init__(self) + + def api_info(self) -> dict[str, dict | bool]: + return { + "info": { + "type": "number", + "description": f"numeric value between {self.minimum} and {self.maximum}", + }, + "serialized_info": False, + } + + def example_inputs(self) -> dict[str, Any]: + return { + "raw": self.minimum, + "serialized": self.minimum, + } + + def get_random_value(self): + n_steps = int((self.maximum - self.minimum) / self.step) + step = random.randint(0, n_steps) + value = self.minimum + step * self.step + # Round to number of decimals in step so that UI doesn't display long decimals + n_decimals = max(str(self.step)[::-1].find("."), 0) + if n_decimals: + value = round(value, n_decimals) + return value + + @staticmethod + def update( + value: float | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, + minimum: float | None = None, + maximum: float | None = None, + step: float | None = None, + label: str | None = None, + info: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + interactive: bool | None = None, + visible: bool | None = None, + ): + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.Slider(...)` instead of `return gr.Slider.update(...)`." 
+ ) + return { + "minimum": minimum, + "maximum": maximum, + "step": step, + "label": label, + "info": info, + "show_label": show_label, + "container": container, + "scale": scale, + "min_width": min_width, + "interactive": interactive, + "visible": visible, + "value": value, + "__type__": "update", + } + + def postprocess(self, y: float | None) -> float | None: + """ + Any postprocessing needed to be performed on function output. + Parameters: + y: numeric output + Returns: + numeric output or minimum number if None + """ + return self.minimum if y is None else y + + def set_interpret_parameters(self, steps: int = 8) -> Slider: + """ + Calculates interpretation scores of numeric values ranging between the minimum and maximum values of the slider. + Parameters: + steps: Number of neighboring values to measure between the minimum and maximum values of the slider range. + """ + self.interpretation_steps = steps + return self + + def get_interpretation_neighbors(self, x) -> tuple[object, dict]: + return ( + np.linspace(self.minimum, self.maximum, self.interpretation_steps).tolist(), + {}, + ) + + def style( + self, + *, + container: bool | None = None, + ): + """ + This method is deprecated. Please set these arguments in the constructor instead. 
+ """ + warn_style_method_deprecation() + if container is not None: + self.container = container + return self diff --git a/testbed/gradio-app__gradio/gradio/components/state.py b/testbed/gradio-app__gradio/gradio/components/state.py new file mode 100644 index 0000000000000000000000000000000000000000..9722fa31e5240b8975af313d58bbcd83bb235fcd --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/state.py @@ -0,0 +1,50 @@ +"""gr.State() component.""" + +from __future__ import annotations + +from copy import deepcopy +from typing import Any + +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import SimpleSerializable + +from gradio.components.base import IOComponent + +set_documentation_group("component") + + +@document() +class State(IOComponent, SimpleSerializable): + """ + Special hidden component that stores session state across runs of the demo by the + same user. The value of the State variable is cleared when the user refreshes the page. + + Preprocessing: No preprocessing is performed + Postprocessing: No postprocessing is performed + Demos: blocks_simple_squares + Guides: real-time-speech-recognition + """ + + allow_string_shortcut = False + + def __init__( + self, + value: Any = None, + **kwargs, + ): + """ + Parameters: + value: the initial value (of arbitrary type) of the state. The provided argument is deepcopied. If a callable is provided, the function will be called whenever the app loads to set the initial value of the state. + """ + self.stateful = True + IOComponent.__init__(self, value=deepcopy(value), **kwargs) + + +class Variable(State): + """Variable was renamed to State. 
This class is kept for backwards compatibility.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def get_block_name(self): + return "state" diff --git a/testbed/gradio-app__gradio/gradio/components/status_tracker.py b/testbed/gradio-app__gradio/gradio/components/status_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..a9abec2969d93846fc81d3572942bef6afc8f3f9 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/status_tracker.py @@ -0,0 +1,13 @@ +"""gr.StatusTracker() component.""" +from gradio_client.serializing import SimpleSerializable + +from gradio.components.base import Component +from gradio.deprecation import warn_deprecation + + +class StatusTracker(Component, SimpleSerializable): + def __init__( + self, + **kwargs, + ): + warn_deprecation("The StatusTracker component is deprecated.") diff --git a/testbed/gradio-app__gradio/gradio/components/textbox.py b/testbed/gradio-app__gradio/gradio/components/textbox.py new file mode 100644 index 0000000000000000000000000000000000000000..7b8bed6e19a4a7a90863a744dcaf8a9ff50a703e --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/textbox.py @@ -0,0 +1,278 @@ +"""gr.Textbox() component.""" + +from __future__ import annotations + +import warnings +from typing import Callable, Literal + +import numpy as np +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import StringSerializable + +from gradio.components.base import ( + FormComponent, + IOComponent, + _Keywords, +) +from gradio.deprecation import warn_style_method_deprecation +from gradio.events import ( + Changeable, + EventListenerMethod, + Focusable, + Inputable, + Selectable, + Submittable, +) +from gradio.interpretation import TokenInterpretable + +set_documentation_group("component") + + +@document() +class Textbox( + FormComponent, + Changeable, + Inputable, + Selectable, + Submittable, + Focusable, + IOComponent, + 
StringSerializable, + TokenInterpretable, +): + """ + Creates a textarea for user to enter string input or display string output. + Preprocessing: passes textarea value as a {str} into the function. + Postprocessing: expects a {str} returned from function and sets textarea value to it. + Examples-format: a {str} representing the textbox input. + + Demos: hello_world, diff_texts, sentence_builder + Guides: creating-a-chatbot, real-time-speech-recognition + """ + + def __init__( + self, + value: str | Callable | None = "", + *, + lines: int = 1, + max_lines: int = 20, + placeholder: str | None = None, + label: str | None = None, + info: str | None = None, + every: float | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + autofocus: bool = False, + autoscroll: bool = True, + elem_classes: list[str] | str | None = None, + type: Literal["text", "password", "email"] = "text", + text_align: Literal["left", "right"] | None = None, + rtl: bool = False, + show_copy_button: bool = False, + **kwargs, + ): + """ + Parameters: + value: default text to provide in textarea. If callable, the function will be called whenever the app loads to set the initial value of the component. + lines: minimum number of line rows to provide in textarea. + max_lines: maximum number of line rows to provide in textarea. + placeholder: placeholder hint to provide behind textarea. + label: component name in interface. + info: additional component description. + every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: if True, will display label. 
+ container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + interactive: if True, will be rendered as an editable textbox; if False, editing will be disabled. If not provided, this is inferred based on whether the component is used as an input or output. + visible: If False, component will be hidden. + autofocus: If True, will focus on the textbox when the page loads. Use this carefully, as it can cause usability issues for sighted and non-sighted users. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + type: The type of textbox. One of: 'text', 'password', 'email', Default is 'text'. + text_align: How to align the text in the textbox, can be: "left", "right", or None (default). If None, the alignment is left if `rtl` is False, or right if `rtl` is True. Can only be changed if `type` is "text". + rtl: If True and `type` is "text", sets the direction of the text to right-to-left (cursor appears on the left of the text). Default is False, which renders cursor on the right. + show_copy_button: If True, includes a copy button to copy the text in the textbox. Only applies if show_label is True. + autoscroll: If True, will automatically scroll to the bottom of the textbox when the value changes, unless the user scrolls up. 
If False, will not scroll to the bottom of the textbox when the value changes. + """ + if type not in ["text", "password", "email"]: + raise ValueError('`type` must be one of "text", "password", or "email".') + + self.lines = lines + if type == "text": + self.max_lines = max(lines, max_lines) + else: + self.max_lines = 1 + self.placeholder = placeholder + self.show_copy_button = show_copy_button + self.autofocus = autofocus + self.select: EventListenerMethod + self.autoscroll = autoscroll + """ + Event listener for when the user selects text in the Textbox. + Uses event data gradio.SelectData to carry `value` referring to selected substring, and `index` tuple referring to selected range endpoints. + See EventData documentation on how to use this event data. + """ + IOComponent.__init__( + self, + label=label, + info=info, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + interactive=interactive, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + TokenInterpretable.__init__(self) + self.type = type + self.rtl = rtl + self.text_align = text_align + + @staticmethod + def update( + value: str | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, + lines: int | None = None, + max_lines: int | None = None, + placeholder: str | None = None, + label: str | None = None, + info: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + visible: bool | None = None, + interactive: bool | None = None, + type: Literal["text", "password", "email"] | None = None, + text_align: Literal["left", "right"] | None = None, + rtl: bool | None = None, + show_copy_button: bool | None = None, + autofocus: bool | None = None, + autoscroll: bool | None = None, + ): + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. 
`return gr.Textbox(...)` instead of `return gr.Textbox.update(...)`."
+        )
+        return {
+            "lines": lines,
+            "max_lines": max_lines,
+            "placeholder": placeholder,
+            "label": label,
+            "info": info,
+            "show_label": show_label,
+            "container": container,
+            "scale": scale,
+            "min_width": min_width,
+            "visible": visible,
+            "value": value,
+            "type": type,
+            "interactive": interactive,
+            "show_copy_button": show_copy_button,
+            "autofocus": autofocus,
+            "text_align": text_align,
+            "rtl": rtl,
+            "autoscroll": autoscroll,
+            "__type__": "update",
+        }
+
+    def preprocess(self, x: str | None) -> str | None:
+        """
+        Preprocesses input (converts it to a string) before passing it to the function.
+        Parameters:
+            x: text
+        Returns:
+            text
+        """
+        return None if x is None else str(x)
+
+    def postprocess(self, y: str | None) -> str | None:
+        """
+        Postprocess the function output y by converting it to a str before passing it to the frontend.
+        Parameters:
+            y: function output to postprocess.
+        Returns:
+            text
+        """
+        return None if y is None else str(y)
+
+    def set_interpret_parameters(
+        self, separator: str = " ", replacement: str | None = None
+    ):
+        """
+        Calculates interpretation score of characters in input by splitting input into tokens, then using a "leave one out" method to calculate the score of each token by removing each token and measuring the delta of the output value.
+        Parameters:
+            separator: Separator to use to split input into tokens.
+            replacement: In the "leave one out" step, the text that the token should be replaced with. If None, the token is removed altogether.
+ """ + self.interpretation_separator = separator + self.interpretation_replacement = replacement + return self + + def tokenize(self, x: str) -> tuple[list[str], list[str], None]: + """ + Tokenizes an input string by dividing into "words" delimited by self.interpretation_separator + """ + tokens = x.split(self.interpretation_separator) + leave_one_out_strings = [] + for index in range(len(tokens)): + leave_one_out_set = list(tokens) + if self.interpretation_replacement is None: + leave_one_out_set.pop(index) + else: + leave_one_out_set[index] = self.interpretation_replacement + leave_one_out_strings.append( + self.interpretation_separator.join(leave_one_out_set) + ) + return tokens, leave_one_out_strings, None + + def get_masked_inputs( + self, tokens: list[str], binary_mask_matrix: list[list[int]] + ) -> list[str]: + """ + Constructs partially-masked sentences for SHAP interpretation + """ + masked_inputs = [] + for binary_mask_vector in binary_mask_matrix: + masked_input = np.array(tokens)[np.array(binary_mask_vector, dtype=bool)] + masked_inputs.append(self.interpretation_separator.join(masked_input)) + return masked_inputs + + def get_interpretation_scores( + self, x, neighbors, scores: list[float], tokens: list[str], masks=None, **kwargs + ) -> list[tuple[str, float]]: + """ + Returns: + Each tuple set represents a set of characters and their corresponding interpretation score. + """ + result = [] + for token, score in zip(tokens, scores): + result.append((token, score)) + result.append((self.interpretation_separator, 0)) + return result + + def style( + self, + *, + show_copy_button: bool | None = None, + container: bool | None = None, + **kwargs, + ): + """ + This method is deprecated. Please set these arguments in the constructor instead. 
+ """ + warn_style_method_deprecation() + if show_copy_button is not None: + self.show_copy_button = show_copy_button + if container is not None: + self.container = container + return self diff --git a/testbed/gradio-app__gradio/gradio/components/upload_button.py b/testbed/gradio-app__gradio/gradio/components/upload_button.py new file mode 100644 index 0000000000000000000000000000000000000000..fadf375d250dd773691f674610d4f6be46a5b204 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/upload_button.py @@ -0,0 +1,200 @@ +"""gr.UploadButton() component.""" + +from __future__ import annotations + +import tempfile +import warnings +from typing import Any, Callable, Literal + +from gradio_client import utils as client_utils +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import FileSerializable + +from gradio import utils +from gradio.components.base import IOComponent, _Keywords +from gradio.deprecation import warn_deprecation, warn_style_method_deprecation +from gradio.events import Clickable, Uploadable + +set_documentation_group("component") + + +@document() +class UploadButton(Clickable, Uploadable, IOComponent, FileSerializable): + """ + Used to create an upload button, when clicked allows a user to upload files that satisfy the specified file type or generic files (if file_type not set). + Preprocessing: passes the uploaded file as a {file-object} or {List[file-object]} depending on `file_count` (or a {bytes}/{List{bytes}} depending on `type`) + Postprocessing: expects function to return a {str} path to a file, or {List[str]} consisting of paths to files. + Examples-format: a {str} path to a local file that populates the component. 
+    Demos: upload_button
+    """
+
+    def __init__(
+        self,
+        label: str = "Upload a File",
+        value: str | list[str] | Callable | None = None,
+        *,
+        variant: Literal["primary", "secondary", "stop"] = "secondary",
+        visible: bool = True,
+        size: Literal["sm", "lg"] | None = None,
+        scale: int | None = None,
+        min_width: int | None = None,
+        interactive: bool = True,
+        elem_id: str | None = None,
+        elem_classes: list[str] | str | None = None,
+        type: Literal["file", "bytes"] = "file",
+        file_count: Literal["single", "multiple", "directory"] = "single",
+        file_types: list[str] | None = None,
+        **kwargs,
+    ):
+        """
+        Parameters:
+            label: Text to display on the button. Defaults to "Upload a File".
+            value: File or list of files to upload by default.
+            variant: 'primary' for main call-to-action, 'secondary' for a more subdued style, 'stop' for a stop button.
+            visible: If False, component will be hidden.
+            size: Size of the button. Can be "sm" or "lg".
+            scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer.
+            min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first.
+            interactive: If False, the UploadButton will be in a disabled state.
+            elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles.
+            elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles.
+            type: Type of value to be returned by component. "file" returns a temporary file object with the same base name as the uploaded file, whose full path can be retrieved by file_obj.name, "bytes" returns a bytes object.
+ file_count: if single, allows user to upload one file. If "multiple", user uploads multiple files. If "directory", user uploads all files in selected directory. Return type will be list for each file in case of "multiple" or "directory". + file_types: List of type of files to be uploaded. "file" allows any file to be uploaded, "image" allows only image files to be uploaded, "audio" allows only audio files to be uploaded, "video" allows only video files to be uploaded, "text" allows only text files to be uploaded. + """ + self.type = type + self.file_count = file_count + if file_count == "directory" and file_types is not None: + warnings.warn( + "The `file_types` parameter is ignored when `file_count` is 'directory'." + ) + if file_types is not None and not isinstance(file_types, list): + raise ValueError( + f"Parameter file_types must be a list. Received {file_types.__class__.__name__}" + ) + self.size = size + self.file_types = file_types + self.label = label + self.variant = variant + IOComponent.__init__( + self, + label=label, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + scale=scale, + min_width=min_width, + interactive=interactive, + **kwargs, + ) + + @staticmethod + def update( + value: str + | list[str] + | Literal[_Keywords.NO_VALUE] + | None = _Keywords.NO_VALUE, + label: str | None = None, + size: Literal["sm", "lg"] | None = None, + variant: Literal["primary", "secondary", "stop"] | None = None, + interactive: bool | None = None, + visible: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + ): + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.UploadButton(...)` instead of `return gr.UploadButton.update(...)`." 
+ ) + return { + "variant": variant, + "interactive": interactive, + "size": size, + "visible": visible, + "value": value, + "scale": scale, + "min_width": min_width, + "label": label, + "__type__": "update", + } + + def preprocess( + self, x: list[dict[str, Any]] | None + ) -> ( + bytes + | tempfile._TemporaryFileWrapper + | list[bytes | tempfile._TemporaryFileWrapper] + | None + ): + """ + Parameters: + x: List of JSON objects with filename as 'name' property and base64 data as 'data' property + Returns: + File objects in requested format + """ + if x is None: + return None + + def process_single_file(f) -> bytes | tempfile._TemporaryFileWrapper: + file_name, data, is_file = ( + f["name"], + f["data"], + f.get("is_file", False), + ) + if self.type == "file": + if is_file: + path = self.make_temp_copy_if_needed(file_name) + else: + data, _ = client_utils.decode_base64_to_binary(data) + path = self.file_bytes_to_file(data, file_name=file_name) + path = str(utils.abspath(path)) + self.temp_files.add(path) + file = tempfile.NamedTemporaryFile( + delete=False, dir=self.DEFAULT_TEMP_DIR + ) + file.name = path + file.orig_name = file_name # type: ignore + return file + elif self.type == "bytes": + if is_file: + with open(file_name, "rb") as file_data: + return file_data.read() + return client_utils.decode_base64_to_binary(data)[0] + else: + raise ValueError( + "Unknown type: " + + str(self.type) + + ". Please choose from: 'file', 'bytes'." + ) + + if self.file_count == "single": + if isinstance(x, list): + return process_single_file(x[0]) + else: + return process_single_file(x) + else: + if isinstance(x, list): + return [process_single_file(f) for f in x] + else: + return process_single_file(x) + + def style( + self, + *, + full_width: bool | None = None, + size: Literal["sm", "lg"] | None = None, + **kwargs, + ): + """ + This method is deprecated. Please set these arguments in the constructor instead. 
+ """ + warn_style_method_deprecation() + if full_width is not None: + warn_deprecation( + "Use `scale` in place of full_width in the constructor. " + "scale=1 will make the button expand, whereas 0 will not." + ) + self.scale = 1 if full_width else None + if size is not None: + self.size = size + return self diff --git a/testbed/gradio-app__gradio/gradio/components/video.py b/testbed/gradio-app__gradio/gradio/components/video.py new file mode 100644 index 0000000000000000000000000000000000000000..684558458610ff90a1f33abf8922167d9fa2dc77 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/components/video.py @@ -0,0 +1,410 @@ +"""gr.Video() component.""" + +from __future__ import annotations + +import tempfile +import warnings +from pathlib import Path +from typing import Callable, Literal + +from gradio_client import utils as client_utils +from gradio_client.data_classes import FileData +from gradio_client.documentation import document, set_documentation_group +from gradio_client.serializing import VideoSerializable + +from gradio import processing_utils, utils, wasm_utils +from gradio.components.base import IOComponent, _Keywords +from gradio.deprecation import warn_style_method_deprecation +from gradio.events import Changeable, Clearable, Playable, Recordable, Uploadable + +if not wasm_utils.IS_WASM: + # TODO: Support ffmpeg on Wasm + from ffmpy import FFmpeg + +set_documentation_group("component") + + +@document() +class Video( + Changeable, + Clearable, + Playable, + Recordable, + Uploadable, + IOComponent, + VideoSerializable, +): + """ + Creates a video component that can be used to upload/record videos (as an input) or display videos (as an output). + For the video to be playable in the browser it must have a compatible container and codec combination. Allowed + combinations are .mp4 with h264 codec, .ogg with theora codec, and .webm with vp9 codec. 
If the component detects + that the output video would not be playable in the browser it will attempt to convert it to a playable mp4 video. + If the conversion fails, the original video is returned. + Preprocessing: passes the uploaded video as a {str} filepath or URL whose extension can be modified by `format`. + Postprocessing: expects a {str} or {pathlib.Path} filepath to a video which is displayed, or a {Tuple[str | pathlib.Path, str | pathlib.Path | None]} where the first element is a filepath to a video and the second element is an optional filepath to a subtitle file. + Examples-format: a {str} filepath to a local file that contains the video, or a {Tuple[str, str]} where the first element is a filepath to a video file and the second element is a filepath to a subtitle file. + Demos: video_identity, video_subtitle + """ + + def __init__( + self, + value: str + | Path + | tuple[str | Path, str | Path | None] + | Callable + | None = None, + *, + format: str | None = None, + source: Literal["upload", "webcam"] = "upload", + height: int | None = None, + width: int | None = None, + label: str | None = None, + every: float | None = None, + show_label: bool | None = None, + container: bool = True, + scale: int | None = None, + min_width: int = 160, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + mirror_webcam: bool = True, + include_audio: bool | None = None, + autoplay: bool = False, + show_share_button: bool | None = None, + **kwargs, + ): + """ + Parameters: + value: A path or URL for the default value that Video component is going to take. Can also be a tuple consisting of (video filepath, subtitle filepath). If a subtitle file is provided, it should be of type .srt or .vtt. Or can be callable, in which case the function will be called whenever the app loads to set the initial value of the component. 
+ format: Format of video format to be returned by component, such as 'avi' or 'mp4'. Use 'mp4' to ensure browser playability. If set to None, video will keep uploaded format. + source: Source of video. "upload" creates a box where user can drop an video file, "webcam" allows user to record a video from their webcam. + height: Height of the displayed video in pixels. + width: Width of the displayed video in pixels. + label: component name in interface. + every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute. + show_label: if True, will display label. + container: If True, will place the component in a container - providing some extra padding around the border. + scale: relative width compared to adjacent Components in a Row. For example, if Component A has scale=2, and Component B has scale=1, A will be twice as wide as B. Should be an integer. + min_width: minimum pixel width, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in this Component being narrower than min_width, the min_width parameter will be respected first. + interactive: if True, will allow users to upload a video; if False, can only be used to display videos. If not provided, this is inferred based on whether the component is used as an input or output. + visible: If False, component will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles. + mirror_webcam: If True webcam will be mirrored. Default is True. + include_audio: Whether the component should record/retain the audio track for a video. 
By default, audio is excluded for webcam videos and included for uploaded videos. + autoplay: Whether to automatically play the video when the component is used as an output. Note: browsers will not autoplay video files if the user has not interacted with the page yet. + show_share_button: If True, will show a share icon in the corner of the component that allows user to share outputs to Hugging Face Spaces Discussions. If False, icon does not appear. If set to None (default behavior), then the icon appears if this Gradio app is launched on Spaces, but not otherwise. + """ + self.format = format + self.autoplay = autoplay + valid_sources = ["upload", "webcam"] + if source not in valid_sources: + raise ValueError( + f"Invalid value for parameter `source`: {source}. Please choose from one of: {valid_sources}" + ) + self.source = source + self.height = height + self.width = width + self.mirror_webcam = mirror_webcam + self.include_audio = ( + include_audio if include_audio is not None else source == "upload" + ) + self.show_share_button = ( + (utils.get_space() is not None) + if show_share_button is None + else show_share_button + ) + IOComponent.__init__( + self, + label=label, + every=every, + show_label=show_label, + container=container, + scale=scale, + min_width=min_width, + interactive=interactive, + visible=visible, + elem_id=elem_id, + elem_classes=elem_classes, + value=value, + **kwargs, + ) + + @staticmethod + def update( + value: str + | tuple[str, str | None] + | Literal[_Keywords.NO_VALUE] + | None = _Keywords.NO_VALUE, + source: Literal["upload", "webcam"] | None = None, + height: int | None = None, + width: int | None = None, + label: str | None = None, + show_label: bool | None = None, + container: bool | None = None, + scale: int | None = None, + min_width: int | None = None, + interactive: bool | None = None, + visible: bool | None = None, + autoplay: bool | None = None, + show_share_button: bool | None = None, + ): + warnings.warn( + "Using the 
update method is deprecated. Simply return a new object instead, e.g. `return gr.Video(...)` instead of `return gr.Video.update(...)`." + ) + return { + "source": source, + "height": height, + "width": width, + "label": label, + "show_label": show_label, + "container": container, + "scale": scale, + "min_width": min_width, + "interactive": interactive, + "visible": visible, + "value": value, + "autoplay": autoplay, + "show_share_button": show_share_button, + "__type__": "update", + } + + def preprocess( + self, x: tuple[FileData, FileData | None] | FileData | None + ) -> str | None: + """ + Parameters: + x: A tuple of (video file data, subtitle file data) or just video file data. + Returns: + A string file path or URL to the preprocessed video. Subtitle file data is ignored. + """ + if x is None: + return None + elif isinstance(x, dict): + video = x + else: + video = x[0] + + file_name, file_data, is_file = ( + video.get("name"), + video["data"], + video.get("is_file", False), + ) + + if is_file: + assert file_name is not None, "Received file data without a file name." + if client_utils.is_http_url_like(file_name): + fn = self.download_temp_copy_if_needed + else: + fn = self.make_temp_copy_if_needed + file_name = Path(fn(file_name)) + else: + assert file_data is not None, "Received empty file data." 
+ file_name = Path(self.base64_to_temp_file_if_needed(file_data, file_name)) + + uploaded_format = file_name.suffix.replace(".", "") + needs_formatting = self.format is not None and uploaded_format != self.format + flip = self.source == "webcam" and self.mirror_webcam + + if needs_formatting or flip: + format = f".{self.format if needs_formatting else uploaded_format}" + output_options = ["-vf", "hflip", "-c:a", "copy"] if flip else [] + output_options += ["-an"] if not self.include_audio else [] + flip_suffix = "_flip" if flip else "" + output_file_name = str( + file_name.with_name(f"{file_name.stem}{flip_suffix}{format}") + ) + if Path(output_file_name).exists(): + return output_file_name + if wasm_utils.IS_WASM: + raise wasm_utils.WasmUnsupportedError( + "Video formatting is not supported in the Wasm mode." + ) + ff = FFmpeg( + inputs={str(file_name): None}, + outputs={output_file_name: output_options}, + ) + ff.run() + return output_file_name + elif not self.include_audio: + output_file_name = str(file_name.with_name(f"muted_{file_name.name}")) + if Path(output_file_name).exists(): + return output_file_name + if wasm_utils.IS_WASM: + raise wasm_utils.WasmUnsupportedError( + "include_audio=False is not supported in the Wasm mode." + ) + ff = FFmpeg( + inputs={str(file_name): None}, + outputs={output_file_name: ["-an"]}, + ) + ff.run() + return output_file_name + else: + return str(file_name) + + def postprocess( + self, y: str | Path | tuple[str | Path, str | Path | None] | None + ) -> tuple[FileData | None, FileData | None] | None: + """ + Processes a video to ensure that it is in the correct format before returning it to the front end. + Parameters: + y: video data in either of the following formats: a tuple of (video filepath, optional subtitle filepath), or just a filepath or URL to an video file, or None. 
+ Returns: + a tuple with the two dictionary, reresent to video and (optional) subtitle, which following formats: + - The first dictionary represents the video file and contains the following keys: + - 'name': a file path to a temporary copy of the processed video. + - 'data': None + - 'is_file': True + - The second dictionary represents the subtitle file and contains the following keys: + - 'name': None + - 'data': Base64 encode the processed subtitle data. + - 'is_file': False + - If subtitle is None, returns (video, None). + - If both video and subtitle are None, returns None. + """ + + if y is None or y == [None, None] or y == (None, None): + return None + if isinstance(y, (str, Path)): + processed_files = (self._format_video(y), None) + elif isinstance(y, (tuple, list)): + assert ( + len(y) == 2 + ), f"Expected lists of length 2 or tuples of length 2. Received: {y}" + assert isinstance(y[0], (str, Path)) and isinstance( + y[1], (str, Path) + ), f"If a tuple is provided, both elements must be strings or Path objects. Received: {y}" + video = y[0] + subtitle = y[1] + processed_files = ( + self._format_video(video), + self._format_subtitle(subtitle), + ) + else: + raise Exception(f"Cannot process type as video: {type(y)}") + + return processed_files + + def _format_video(self, video: str | Path | None) -> FileData | None: + """ + Processes a video to ensure that it is in the correct format. + Parameters: + video: video data in either of the following formats: a string filepath or URL to an video file, or None. + Returns: + a dictionary with the following keys: + + - 'name': a file path to a temporary copy of the processed video. 
+ - 'data': None + - 'is_file': True + """ + if video is None: + return None + video = str(video) + returned_format = video.split(".")[-1].lower() + if self.format is None or returned_format == self.format: + conversion_needed = False + else: + conversion_needed = True + + is_url = client_utils.is_http_url_like(video) + + # For cases where the video is a URL and does not need to be converted to another format, we can just return the URL + if is_url and not (conversion_needed): + return {"name": video, "data": None, "is_file": True} + + # For cases where the video needs to be converted to another format + if is_url: + video = self.download_temp_copy_if_needed(video) + if ( + processing_utils.ffmpeg_installed() + and not processing_utils.video_is_playable(video) + ): + warnings.warn( + "Video does not have browser-compatible container or codec. Converting to mp4" + ) + video = processing_utils.convert_video_to_playable_mp4(video) + # Recalculate the format in case convert_video_to_playable_mp4 already made it the + # selected format + returned_format = video.split(".")[-1].lower() + if self.format is not None and returned_format != self.format: + if wasm_utils.IS_WASM: + raise wasm_utils.WasmUnsupportedError( + "Returning a video in a different format is not supported in the Wasm mode." + ) + output_file_name = video[0 : video.rindex(".") + 1] + self.format + ff = FFmpeg( + inputs={video: None}, + outputs={output_file_name: None}, + global_options="-y", + ) + ff.run() + video = output_file_name + + video = self.make_temp_copy_if_needed(video) + + return { + "name": video, + "data": None, + "is_file": True, + "orig_name": Path(video).name, + } + + def _format_subtitle(self, subtitle: str | Path | None) -> FileData | None: + """ + Convert subtitle format to VTT and process the video to ensure it meets the HTML5 requirements. + Parameters: + subtitle: subtitle path in either of the VTT and SRT format. 
+ Returns: + a dictionary with the following keys: + - 'name': None + - 'data': base64-encoded subtitle data. + - 'is_file': False + """ + + def srt_to_vtt(srt_file_path, vtt_file_path): + """Convert an SRT subtitle file to a VTT subtitle file""" + with open(srt_file_path, encoding="utf-8") as srt_file, open( + vtt_file_path, "w", encoding="utf-8" + ) as vtt_file: + vtt_file.write("WEBVTT\n\n") + for subtitle_block in srt_file.read().strip().split("\n\n"): + subtitle_lines = subtitle_block.split("\n") + subtitle_timing = subtitle_lines[1].replace(",", ".") + subtitle_text = "\n".join(subtitle_lines[2:]) + vtt_file.write(f"{subtitle_timing} --> {subtitle_timing}\n") + vtt_file.write(f"{subtitle_text}\n\n") + + if subtitle is None: + return None + + valid_extensions = (".srt", ".vtt") + + if Path(subtitle).suffix not in valid_extensions: + raise ValueError( + f"Invalid value for parameter `subtitle`: {subtitle}. Please choose a file with one of these extensions: {valid_extensions}" + ) + + # HTML5 only support vtt format + if Path(subtitle).suffix == ".srt": + temp_file = tempfile.NamedTemporaryFile( + delete=False, suffix=".vtt", dir=self.DEFAULT_TEMP_DIR + ) + + srt_to_vtt(subtitle, temp_file.name) + subtitle = temp_file.name + + subtitle_data = client_utils.encode_url_or_file_to_base64(subtitle) + return {"name": None, "data": subtitle_data, "is_file": False} + + def style(self, *, height: int | None = None, width: int | None = None, **kwargs): + """ + This method is deprecated. Please set these arguments in the constructor instead. 
+ """ + warn_style_method_deprecation() + if height is not None: + self.height = height + if width is not None: + self.width = width + return self diff --git a/testbed/gradio-app__gradio/gradio/context.py b/testbed/gradio-app__gradio/gradio/context.py new file mode 100644 index 0000000000000000000000000000000000000000..be7f9059bfb4d27791b4f069bbd58659874533b2 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/context.py @@ -0,0 +1,25 @@ +# Defines the Context class, which is used to store the state of all Blocks that are being rendered. + +from __future__ import annotations + +from contextvars import ContextVar +from typing import TYPE_CHECKING + +if TYPE_CHECKING: # Only import for type checking (is False at runtime). + from gradio.blocks import BlockContext, Blocks + from gradio.routes import Request + + +class Context: + root_block: Blocks | None = None # The current root block that holds all blocks. + block: BlockContext | None = None # The current block that children are added to. + id: int = 0 # Running id to uniquely refer to any block that gets defined + ip_address: str | None = None # The IP address of the user. + hf_token: str | None = None # The token provided when loading private HF repos + + +class LocalContext: + blocks: ContextVar[Blocks | None] = ContextVar("blocks", default=None) + in_event_listener: ContextVar[bool] = ContextVar("in_event_listener", default=False) + event_id: ContextVar[str | None] = ContextVar("event_id", default=None) + request: ContextVar[Request | None] = ContextVar("request", default=None) diff --git a/testbed/gradio-app__gradio/gradio/data_classes.py b/testbed/gradio-app__gradio/gradio/data_classes.py new file mode 100644 index 0000000000000000000000000000000000000000..5ebb0c2bded20f1ddbbc73ffc79fac18c7af92db --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/data_classes.py @@ -0,0 +1,62 @@ +"""Pydantic data models and other dataclasses. 
This is the only file that uses Optional[] +typing syntax instead of | None syntax to work with pydantic""" +from enum import Enum, auto +from typing import Any, Dict, List, Optional, Union + +from pydantic import BaseModel +from typing_extensions import Literal + + +class PredictBody(BaseModel): + session_hash: Optional[str] = None + event_id: Optional[str] = None + data: List[Any] + event_data: Optional[Any] = None + fn_index: Optional[int] = None + batched: Optional[ + bool + ] = False # Whether the data is a batch of samples (i.e. called from the queue if batch=True) or a single sample (i.e. called from the UI) + request: Optional[ + Union[Dict, List[Dict]] + ] = None # dictionary of request headers, query parameters, url, etc. (used to to pass in request for queuing) + + +class ResetBody(BaseModel): + session_hash: str + fn_index: int + + +class InterfaceTypes(Enum): + STANDARD = auto() + INPUT_ONLY = auto() + OUTPUT_ONLY = auto() + UNIFIED = auto() + + +class Estimation(BaseModel): + msg: Optional[str] = "estimation" + rank: Optional[int] = None + queue_size: int + avg_event_process_time: Optional[float] = None + avg_event_concurrent_process_time: Optional[float] = None + rank_eta: Optional[float] = None + queue_eta: float + + +class ProgressUnit(BaseModel): + index: Optional[int] = None + length: Optional[int] = None + unit: Optional[str] = None + progress: Optional[float] = None + desc: Optional[str] = None + + +class Progress(BaseModel): + msg: str = "progress" + progress_data: List[ProgressUnit] = [] + + +class LogMessage(BaseModel): + msg: str = "log" + log: str + level: Literal["info", "warning"] diff --git a/testbed/gradio-app__gradio/gradio/deploy_space.py b/testbed/gradio-app__gradio/gradio/deploy_space.py new file mode 100644 index 0000000000000000000000000000000000000000..9014b4e24ea2987d05dcf6ad58a6f0ee437646de --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/deploy_space.py @@ -0,0 +1,175 @@ +from __future__ import annotations + +import 
argparse +import os +import re + +import huggingface_hub + +import gradio as gr + +repo_directory = os.getcwd() +readme_file = os.path.join(repo_directory, "README.md") +github_action_template = os.path.join( + os.path.dirname(__file__), "deploy_space_action.yaml" +) + + +def add_configuration_to_readme( + title: str | None, + app_file: str | None, +) -> dict: + configuration = {} + + dir_name = os.path.basename(repo_directory) + if title is None: + title = input(f"Enter Spaces app title [{dir_name}]: ") or dir_name + formatted_title = format_title(title) + if formatted_title != title: + print(f"Formatted to {formatted_title}. ") + configuration["title"] = formatted_title + + if app_file is None: + for file in os.listdir(repo_directory): + file_path = os.path.join(repo_directory, file) + if not os.path.isfile(file_path) or not file.endswith(".py"): + continue + + with open(file_path, encoding="utf-8", errors="ignore") as f: + content = f.read() + if "import gradio" in content: + app_file = file + break + + app_file = ( + input(f"Enter Gradio app file {f'[{app_file}]' if app_file else ''}: ") + or app_file + ) + if not app_file or not os.path.exists(app_file): + raise FileNotFoundError("Failed to find Gradio app file.") + configuration["app_file"] = app_file + + configuration["sdk"] = "gradio" + configuration["sdk_version"] = gr.__version__ + huggingface_hub.metadata_save(readme_file, configuration) + + configuration["hardware"] = ( + input( + f"Enter Spaces hardware ({', '.join(hardware.value for hardware in huggingface_hub.SpaceHardware)}) [cpu-basic]: " + ) + or "cpu-basic" + ) + + secrets = {} + if input("Any Spaces secrets (y/n) [n]: ") == "y": + while True: + secret_name = input("Enter secret name (leave blank to end): ") + if not secret_name: + break + secret_value = input(f"Enter secret value for {secret_name}: ") + secrets[secret_name] = secret_value + configuration["secrets"] = secrets + + requirements_file = os.path.join(repo_directory, 
"requirements.txt") + if ( + not os.path.exists(requirements_file) + and input("Create requirements.txt file? (y/n) [n]: ").lower() == "y" + ): + while True: + requirement = input("Enter a dependency (leave blank to end): ") + if not requirement: + break + with open(requirements_file, "a") as f: + f.write(requirement + "\n") + + if ( + input( + "Create Github Action to automatically update Space on 'git push'? [n]: " + ).lower() + == "y" + ): + track_branch = input("Enter branch to track [main]: ") or "main" + github_action_file = os.path.join( + repo_directory, ".github/workflows/update_space.yml" + ) + os.makedirs(os.path.dirname(github_action_file), exist_ok=True) + with open(github_action_template) as f: + github_action_content = f.read() + github_action_content = github_action_content.replace("$branch", track_branch) + with open(github_action_file, "w") as f: + f.write(github_action_content) + + print( + "Github Action created. Add your Hugging Face write token (from https://huggingface.co/settings/tokens) as an Actions Secret named 'hf_token' to your GitHub repository. This can be set in your repository's settings page." 
+ ) + + return configuration + + +def format_title(title: str): + title = title.replace(" ", "_") + title = re.sub(r"[^a-zA-Z0-9\-._]", "", title) + title = re.sub("-+", "-", title) + while title.startswith("."): + title = title[1:] + return title + + +def deploy(): + if ( + os.getenv("SYSTEM") == "spaces" + ): # in case a repo with this function is uploaded to spaces + return + parser = argparse.ArgumentParser(description="Deploy to Spaces") + parser.add_argument("deploy") + parser.add_argument("--title", type=str, help="Spaces app title") + parser.add_argument("--app-file", type=str, help="File containing the Gradio app") + + args = parser.parse_args() + + hf_api = huggingface_hub.HfApi() + whoami = None + login = False + try: + whoami = hf_api.whoami() + if whoami["auth"]["accessToken"]["role"] != "write": + login = True + except OSError: + login = True + if login: + print("Need 'write' access token to create a Spaces repo.") + huggingface_hub.login(add_to_git_credential=False) + whoami = hf_api.whoami() + + configuration: None | dict = None + if os.path.exists(readme_file): + try: + configuration = huggingface_hub.metadata_load(readme_file) + except ValueError: + pass + + if configuration is None: + print( + f"Creating new Spaces Repo in '{repo_directory}'. Collecting metadata, press Enter to accept default value." 
+ ) + configuration = add_configuration_to_readme( + args.title, + args.app_file, + ) + + space_id = huggingface_hub.create_repo( + configuration["title"], + space_sdk="gradio", + repo_type="space", + exist_ok=True, + space_hardware=configuration.get("hardware"), + ).repo_id + hf_api.upload_folder( + repo_id=space_id, + repo_type="space", + folder_path=repo_directory, + ) + if configuration.get("secrets"): + for secret_name, secret_value in configuration["secrets"].items(): + huggingface_hub.add_space_secret(space_id, secret_name, secret_value) + print(f"Space available at https://huggingface.co/spaces/{space_id}") diff --git a/testbed/gradio-app__gradio/gradio/deploy_space_action.yaml b/testbed/gradio-app__gradio/gradio/deploy_space_action.yaml new file mode 100644 index 0000000000000000000000000000000000000000..74f4cbdbde13728e57729531ca5de692b8521b69 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/deploy_space_action.yaml @@ -0,0 +1,28 @@ +name: Run Python script + +on: + push: + branches: + - $branch + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.9' + + - name: Install Gradio + run: python -m pip install gradio + + - name: Log in to Hugging Face + run: python -c 'import huggingface_hub; huggingface_hub.login(token="${{ secrets.hf_token }}")' + + - name: Deploy to Spaces + run: gradio deploy diff --git a/testbed/gradio-app__gradio/gradio/deprecation.py b/testbed/gradio-app__gradio/gradio/deprecation.py new file mode 100644 index 0000000000000000000000000000000000000000..d14f88ffcda40a78a8072c48c378f95b874ff8fa --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/deprecation.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +import warnings + +from gradio import utils + + +class GradioDeprecationWarning(UserWarning): + # This does not subclass DeprecationWarning + # because we want to show the warning by 
default. + pass + + +class GradioUnusedKwargWarning(UserWarning): + pass + + +def simple_deprecated_notice(term: str) -> str: + return f"`{term}` parameter is deprecated, and it has no effect" + + +def use_in_launch(term: str) -> str: + return f"`{term}` is deprecated in `Interface()`, please use it within `launch()` instead." + + +DEPRECATION_MESSAGE = { + "optional": simple_deprecated_notice("optional"), + "keep_filename": simple_deprecated_notice("keep_filename"), + "numeric": simple_deprecated_notice("numeric"), + "verbose": simple_deprecated_notice("verbose"), + "allow_screenshot": simple_deprecated_notice("allow_screenshot"), + "layout": simple_deprecated_notice("layout"), + "show_input": simple_deprecated_notice("show_input"), + "show_output": simple_deprecated_notice("show_output"), + "capture_session": simple_deprecated_notice("capture_session"), + "api_mode": simple_deprecated_notice("api_mode"), + "show_tips": use_in_launch("show_tips"), + "encrypt": simple_deprecated_notice("encrypt"), + "enable_queue": use_in_launch("enable_queue"), + "server_name": use_in_launch("server_name"), + "server_port": use_in_launch("server_port"), + "width": use_in_launch("width"), + "height": use_in_launch("height"), + "plot": "The 'plot' parameter has been deprecated. 
Use the new Plot component instead", +} + + +def check_deprecated_parameters( + cls: str, *, stacklevel: int | None = None, kwargs +) -> None: + if stacklevel is None: + stacklevel = utils.find_user_stack_level() + + for key, value in DEPRECATION_MESSAGE.items(): + if key in kwargs: + if key == "plot" and cls != "Image": + continue + kwargs.pop(key) + warnings.warn(value, GradioDeprecationWarning, stacklevel=stacklevel) + + if kwargs: + warnings.warn( + f"You have unused kwarg parameters in {cls}, please remove them: {kwargs}", + GradioUnusedKwargWarning, + stacklevel=stacklevel, + ) + + +def warn_deprecation(text: str) -> None: + warnings.warn( + text, + GradioDeprecationWarning, + stacklevel=utils.find_user_stack_level(), + ) + + +def warn_style_method_deprecation() -> None: + warn_deprecation( + "The `style` method is deprecated. Please set these arguments in the constructor instead." + ) diff --git a/testbed/gradio-app__gradio/gradio/events.py b/testbed/gradio-app__gradio/gradio/events.py new file mode 100644 index 0000000000000000000000000000000000000000..2fb5fdcf56d4ca36d7e4ad7b6cd1f4adbad5e526 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/events.py @@ -0,0 +1,531 @@ +"""Contains all of the events that can be triggered in a gr.Blocks() app, with the exception +of the on-page-load event, which is defined in gr.Blocks().load().""" + +from __future__ import annotations + +from functools import wraps +from typing import TYPE_CHECKING, Any, Callable, Literal, Sequence + +from gradio_client.documentation import document, set_documentation_group + +from gradio.blocks import Block +from gradio.context import Context +from gradio.deprecation import warn_deprecation +from gradio.helpers import EventData +from gradio.utils import get_cancel_function + +if TYPE_CHECKING: # Only import for type checking (is False at runtime). 
+ from gradio.components import Component + +set_documentation_group("events") + + +def set_cancel_events( + triggers: Sequence[EventListenerMethod], + cancels: None | dict[str, Any] | list[dict[str, Any]], +): + if cancels: + if not isinstance(cancels, list): + cancels = [cancels] + cancel_fn, fn_indices_to_cancel = get_cancel_function(cancels) + + if Context.root_block is None: + raise AttributeError( + "Cannot cancel {self.event_name} outside of a gradio.Blocks context." + ) + + Context.root_block.set_event_trigger( + triggers, + cancel_fn, + inputs=None, + outputs=None, + queue=False, + preprocess=False, + cancels=fn_indices_to_cancel, + ) + + +class EventListener(Block): + def __init__(self: Any): + for event_listener_class in EventListener.__subclasses__(): + if isinstance(self, event_listener_class): + event_listener_class.__init__(self) + + +class Dependency(dict): + def __init__(self, key_vals, dep_index, fn): + super().__init__(key_vals) + self.fn = fn + self.then = EventListenerMethod( + None, + "then", + trigger_after=dep_index, + trigger_only_on_success=False, + ) + """ + Triggered after directly preceding event is completed, regardless of success or failure. + """ + self.success = EventListenerMethod( + None, + "success", + trigger_after=dep_index, + trigger_only_on_success=True, + ) + """ + Triggered after directly preceding event is completed, if it was successful. + """ + + def __call__(self, *args, **kwargs): + return self.fn(*args, **kwargs) + + +class EventListenerMethod: + """ + Triggered on an event deployment. 
+ """ + + def __init__( + self, + trigger: Block | None, + event_name: str, + show_progress: Literal["full", "minimal", "hidden"] = "full", + callback: Callable | None = None, + trigger_after: int | None = None, + trigger_only_on_success: bool = False, + ): + self.trigger = trigger + self.event_name = event_name + self.show_progress = show_progress + self.callback = callback + self.trigger_after = trigger_after + self.trigger_only_on_success = trigger_only_on_success + + def __call__( + self, + fn: Callable | None | Literal["decorator"] = "decorator", + inputs: Component | Sequence[Component] | set[Component] | None = None, + outputs: Component | Sequence[Component] | None = None, + api_name: str | None | Literal[False] = None, + status_tracker: None = None, + scroll_to_output: bool = False, + show_progress: Literal["full", "minimal", "hidden"] | None = None, + queue: bool | None = None, + batch: bool = False, + max_batch_size: int = 4, + preprocess: bool = True, + postprocess: bool = True, + cancels: dict[str, Any] | list[dict[str, Any]] | None = None, + every: float | None = None, + _js: str | None = None, + ) -> Dependency: + """ + Parameters: + fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component. + inputs: List of gradio.components to use as inputs. If the function takes no inputs, this should be an empty list. + outputs: List of gradio.components to use as outputs. If the function returns no outputs, this should be an empty list. + api_name: Defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the api docs. 
If set to None, the endpoint will be exposed in the api docs as an unnamed endpoint, although this behavior will be changed in Gradio 4.0. If set to a string, the endpoint will be exposed in the api docs with the given name. + status_tracker: Deprecated and has no effect. + scroll_to_output: If True, will scroll to output component on completion + show_progress: If True, will show progress animation while pending + queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app. + batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component. + max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True) + preprocess: If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component). + postprocess: If False, will not run postprocessing of component data before returning 'fn' output to the browser. + cancels: A list of other events to cancel when this listener is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another components .click method. Functions that have not yet run (or generators that are iterating) will be cancelled, but functions that are currently running will be allowed to finish. + every: Run this event 'every' number of seconds while the client connection is open. Interpreted in seconds. Queue must be enabled. 
+ """ + if fn == "decorator": + + def wrapper(func): + self.__call__( + func, + inputs, + outputs, + api_name, + status_tracker, + scroll_to_output, + show_progress, + queue, + batch, + max_batch_size, + preprocess, + postprocess, + cancels, + every, + _js, + ) + + @wraps(func) + def inner(*args, **kwargs): + return func(*args, **kwargs) + + return inner + + return Dependency({}, None, wrapper) + + if status_tracker: + warn_deprecation( + "The 'status_tracker' parameter has been deprecated and has no effect." + ) + if self.event_name == "stop": + warn_deprecation( + "The `stop` event on Video and Audio has been deprecated and will be remove in a future version. Use `ended` instead." + ) + + if isinstance(self, Streamable): + self.check_streamable() + if isinstance(show_progress, bool): + show_progress = "full" if show_progress else "hidden" + + if Context.root_block is None: + raise AttributeError( + "Cannot call {self.event_name} outside of a gradio.Blocks context." + ) + + dep, dep_index = Context.root_block.set_event_trigger( + [self], + fn, + inputs, + outputs, + preprocess=preprocess, + postprocess=postprocess, + scroll_to_output=scroll_to_output, + show_progress=show_progress + if show_progress is not None + else self.show_progress, + api_name=api_name, + js=_js, + queue=queue, + batch=batch, + max_batch_size=max_batch_size, + every=every, + trigger_after=self.trigger_after, + trigger_only_on_success=self.trigger_only_on_success, + ) + set_cancel_events([self], cancels) + if self.callback: + self.callback() + return Dependency(dep, dep_index, fn) + + +def on( + triggers: Sequence[EventListenerMethod] | EventListenerMethod | None = None, + fn: Callable | None | Literal["decorator"] = "decorator", + inputs: Component | list[Component] | set[Component] | None = None, + outputs: Component | list[Component] | None = None, + *, + api_name: str | None | Literal[False] = None, + scroll_to_output: bool = False, + show_progress: Literal["full", "minimal", "hidden"] = 
"full", + queue: bool | None = None, + batch: bool = False, + max_batch_size: int = 4, + preprocess: bool = True, + postprocess: bool = True, + cancels: dict[str, Any] | list[dict[str, Any]] | None = None, + every: float | None = None, + _js: str | None = None, +) -> Dependency: + """ + Parameters: + triggers: List of triggers to listen to, e.g. [btn.click, number.change]. If None, will listen to changes to any inputs. + fn: the function to call when this event is triggered. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component. + inputs: List of gradio.components to use as inputs. If the function takes no inputs, this should be an empty list. + outputs: List of gradio.components to use as outputs. If the function returns no outputs, this should be an empty list. + api_name: Defines how the endpoint appears in the API docs. Can be a string, None, or False. If False, the endpoint will not be exposed in the api docs. If set to None, the endpoint will be exposed in the api docs as an unnamed endpoint, although this behavior will be changed in Gradio 4.0. If set to a string, the endpoint will be exposed in the api docs with the given name. + scroll_to_output: If True, will scroll to output component on completion + show_progress: If True, will show progress animation while pending + queue: If True, will place the request on the queue, if the queue has been enabled. If False, will not put this event on the queue, even if the queue has been enabled. If None, will use the queue setting of the gradio app. + batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). 
The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component. + max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True) + preprocess: If False, will not run preprocessing of component data before running 'fn' (e.g. leaving it as a base64 string if this method is called with the `Image` component). + postprocess: If False, will not run postprocessing of component data before returning 'fn' output to the browser. + cancels: A list of other events to cancel when this listener is triggered. For example, setting cancels=[click_event] will cancel the click_event, where click_event is the return value of another components .click method. Functions that have not yet run (or generators that are iterating) will be cancelled, but functions that are currently running will be allowed to finish. + every: Run this event 'every' number of seconds while the client connection is open. Interpreted in seconds. Queue must be enabled. 
+ """ + from gradio.components.base import Component + + if isinstance(triggers, EventListenerMethod): + triggers = [triggers] + if isinstance(inputs, Component): + inputs = [inputs] + + if fn == "decorator": + + def wrapper(func): + on( + triggers, + fn=func, + inputs=inputs, + outputs=outputs, + api_name=api_name, + scroll_to_output=scroll_to_output, + show_progress=show_progress, + queue=queue, + batch=batch, + max_batch_size=max_batch_size, + preprocess=preprocess, + postprocess=postprocess, + cancels=cancels, + every=every, + _js=_js, + ) + + @wraps(func) + def inner(*args, **kwargs): + return func(*args, **kwargs) + + return inner + + return Dependency({}, None, wrapper) + + if Context.root_block is None: + raise Exception("Cannot call on() outside of a gradio.Blocks context.") + if triggers is None: + triggers = [input.change for input in inputs] if inputs is not None else [] + + dep, dep_index = Context.root_block.set_event_trigger( + triggers, + fn, + inputs, + outputs, + preprocess=preprocess, + postprocess=postprocess, + scroll_to_output=scroll_to_output, + show_progress=show_progress, + api_name=api_name, + js=_js, + queue=queue, + batch=batch, + max_batch_size=max_batch_size, + every=every, + ) + set_cancel_events(triggers, cancels) + return Dependency(dep, dep_index, fn) + + +@document("*change", inherit=True) +class Changeable(EventListener): + def __init__(self): + self.change = EventListenerMethod(self, "change") + """ + This listener is triggered when the component's value changes either because of user input (e.g. a user types in a textbox) OR because of a function update (e.g. an image receives a value from the output of an event trigger). + See `.input()` for a listener that is only triggered by user input. + """ + + +@document("*input", inherit=True) +class Inputable(EventListener): + def __init__(self): + self.input = EventListenerMethod(self, "input") + """ + This listener is triggered when the user changes the value of the component. 
+ """ + + +@document("*click", inherit=True) +class Clickable(EventListener): + def __init__(self): + self.click = EventListenerMethod(self, "click") + """ + This listener is triggered when the component (e.g. a button) is clicked. + """ + + +@document("*submit", inherit=True) +class Submittable(EventListener): + def __init__(self): + self.submit = EventListenerMethod(self, "submit") + """ + This listener is triggered when the user presses the Enter key while the component (e.g. a textbox) is focused. + """ + + +@document("*edit", inherit=True) +class Editable(EventListener): + def __init__(self): + self.edit = EventListenerMethod(self, "edit") + """ + This listener is triggered when the user edits the component (e.g. image) using the + built-in editor. + """ + + +@document("*clear", inherit=True) +class Clearable(EventListener): + def __init__(self): + self.clear = EventListenerMethod(self, "clear") + """ + This listener is triggered when the user clears the component (e.g. image or audio) + using the X button for the component. + """ + + +@document("*play", "*pause", "*stop", "*end", inherit=True) +class Playable(EventListener): + def __init__(self): + self.play = EventListenerMethod(self, "play") + """ + This listener is triggered when the user plays the component (e.g. audio or video). + """ + + self.pause = EventListenerMethod(self, "pause") + """ + This listener is triggered when the media stops playing for any reason (e.g. audio or video). + """ + + self.stop = EventListenerMethod(self, "stop") + """ + This listener is triggered when the user reaches the end of the media track (e.g. audio or video). + """ + + self.end = EventListenerMethod(self, "end") + """ + This listener is triggered when the user reaches the end of the media track (e.g. audio or video). 
+ """ + + +@document("*stream", inherit=True) +class Streamable(EventListener): + def __init__(self): + self.streaming: bool + self.stream = EventListenerMethod( + self, + "stream", + show_progress="hidden", + callback=lambda: setattr(self, "streaming", True), + ) + """ + This listener is triggered when the user streams the component (e.g. a live webcam + component). + """ + + def check_streamable(self): + pass + + +class StreamableOutput(EventListener): + def __init__(self): + self.streaming: bool + + def stream_output(self, y, output_id: str, first_chunk: bool) -> tuple[bytes, Any]: + raise NotImplementedError + + +@document("*start_recording", "*stop_recording", inherit=True) +class Recordable(EventListener): + def __init__(self): + self.start_recording = EventListenerMethod(self, "start_recording") + """ + This listener is triggered when the user starts recording with the component (e.g. audio or video). + """ + + self.stop_recording = EventListenerMethod(self, "stop_recording") + """ + This listener is triggered when the user stops recording with the component (e.g. audio or video). + """ + + +@document("*focus", "*blur", inherit=True) +class Focusable(EventListener): + def __init__(self): + self.focus = EventListenerMethod(self, "focus") + """ + This listener is triggered when the component is focused (e.g. when the user clicks inside a textbox). + """ + + self.blur = EventListenerMethod(self, "blur") + """ + This listener is triggered when the component's is unfocused/blurred (e.g. when the user clicks outside of a textbox). + """ + + +@document("*upload", inherit=True) +class Uploadable(EventListener): + def __init__(self): + self.upload = EventListenerMethod(self, "upload") + """ + This listener is triggered when the user uploads a file into the component (e.g. when the user uploads a video into a video component). 
+ """ + + +@document("*release", inherit=True) +class Releaseable(EventListener): + def __init__(self): + self.release = EventListenerMethod(self, "release") + """ + This listener is triggered when the user releases the mouse on this component (e.g. when the user releases the slider). + """ + + +@document("*select", inherit=True) +class Selectable(EventListener): + def __init__(self): + self.selectable: bool = False + self.select = EventListenerMethod( + self, "select", callback=lambda: setattr(self, "selectable", True) + ) + """ + This listener is triggered when the user selects from within the Component. + This event has EventData of type gradio.SelectData that carries information, accessible through SelectData.index and SelectData.value. + See EventData documentation on how to use this event data. + """ + + def get_config(self): + config = super().get_config() + config["selectable"] = self.selectable + return config + + +class SelectData(EventData): + def __init__(self, target: Block | None, data: Any): + super().__init__(target, data) + self.index: int | tuple[int, int] = data["index"] + """ + The index of the selected item. Is a tuple if the component is two dimensional or selection is a range. + """ + self.value: Any = data["value"] + """ + The value of the selected item. + """ + self.selected: bool = data.get("selected", True) + """ + True if the item was selected, False if deselected. + """ + + +@document("*like", inherit=True) +class Likeable(EventListener): + def __init__(self): + self.likeable: bool = False + self.like = EventListenerMethod( + self, "like", callback=lambda: setattr(self, "likeable", True) + ) + """ + This listener is triggered when the user likes/dislikes from within the Component. + This event has EventData of type gradio.LikeData that carries information, accessible through LikeData.index and LikeData.value. + See EventData documentation on how to use this event data. 
+ """ + + def get_config(self): + config = super().get_config() + config["likeable"] = self.likeable + return config + + +class LikeData(EventData): + def __init__(self, target: Block | None, data: Any): + super().__init__(target, data) + self.index: int | tuple[int, int] = data["index"] + """ + The index of the liked/disliked item. Is a tuple if the component is two dimensional. + """ + self.value: Any = data["value"] + """ + The value of the liked/disliked item. + """ + self.liked: bool = data.get("liked", True) + """ + True if the item was liked, False if disliked. + """ diff --git a/testbed/gradio-app__gradio/gradio/exceptions.py b/testbed/gradio-app__gradio/gradio/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..bca214460e8f34378a053d5b6b42daf35dceeb60 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/exceptions.py @@ -0,0 +1,65 @@ +from gradio_client.documentation import document, set_documentation_group + +set_documentation_group("helpers") + + +class DuplicateBlockError(ValueError): + """Raised when a Blocks contains more than one Block with the same id""" + + pass + + +class TooManyRequestsError(Exception): + """Raised when the Hugging Face API returns a 429 status code.""" + + pass + + +class InvalidApiNameError(ValueError): + pass + + +class ServerFailedToStartError(Exception): + pass + + +class InvalidBlockError(ValueError): + """Raised when an event in a Blocks contains a reference to a Block that is not in the original Blocks""" + + pass + + +class ReloadError(ValueError): + """Raised when something goes wrong when reloading the gradio app.""" + + pass + + +InvalidApiName = InvalidApiNameError # backwards compatibility + +set_documentation_group("modals") + + +@document() +class Error(Exception): + """ + This class allows you to pass custom error messages to the user. 
You can do so by raising a gr.Error("custom message") anywhere in the code, and when that line is executed the custom message will appear in a modal on the demo. + Example: + import gradio as gr + def divide(numerator, denominator): + if denominator == 0: + raise gr.Error("Cannot divide by zero!") + gr.Interface(divide, ["number", "number"], "number").launch() + Demos: calculator, blocks_chained_events + """ + + def __init__(self, message: str = "Error raised."): + """ + Parameters: + message: The error message to be displayed to the user. + """ + self.message = message + super().__init__(self.message) + + def __str__(self): + return repr(self.message) diff --git a/testbed/gradio-app__gradio/gradio/external.py b/testbed/gradio-app__gradio/gradio/external.py new file mode 100644 index 0000000000000000000000000000000000000000..88710ca7c4d30d1f166863777290b65cf507953a --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/external.py @@ -0,0 +1,556 @@ +"""This module should not be used directly as its API is subject to change. 
Instead, +use the `gr.Blocks.load()` or `gr.load()` functions.""" + +from __future__ import annotations + +import json +import re +import warnings +from typing import TYPE_CHECKING, Callable + +import requests +from gradio_client import Client +from gradio_client.documentation import document, set_documentation_group + +import gradio +from gradio import components, utils +from gradio.context import Context +from gradio.deprecation import warn_deprecation +from gradio.exceptions import Error, TooManyRequestsError +from gradio.external_utils import ( + cols_to_rows, + encode_to_base64, + get_tabular_examples, + postprocess_label, + rows_to_cols, + streamline_spaces_interface, +) +from gradio.processing_utils import extract_base64_data, to_binary + +if TYPE_CHECKING: + from gradio.blocks import Blocks + from gradio.interface import Interface + + +set_documentation_group("helpers") + + +@document() +def load( + name: str, + src: str | None = None, + api_key: str | None = None, + hf_token: str | None = None, + alias: str | None = None, + **kwargs, +) -> Blocks: + """ + Method that constructs a Blocks from a Hugging Face repo. Can accept + model repos (if src is "models") or Space repos (if src is "spaces"). The input + and output components are automatically loaded from the repo. + Parameters: + name: the name of the model (e.g. "gpt2" or "facebook/bart-base") or space (e.g. "flax-community/spanish-gpt2"), can include the `src` as prefix (e.g. "models/facebook/bart-base") + src: the source of the model: `models` or `spaces` (or leave empty if source is provided as a prefix in `name`) + api_key: Deprecated. Please use the `hf_token` parameter instead. + hf_token: optional access token for loading private Hugging Face Hub models or spaces. Find your token here: https://huggingface.co/settings/tokens. Warning: only provide this if you are loading a trusted private Space as it can be read by the Space you are loading. 
+ alias: optional string used as the name of the loaded model instead of the default name (only applies if loading a Space running Gradio 2.x) + Returns: + a Gradio Blocks object for the given model + Example: + import gradio as gr + demo = gr.load("gradio/question-answering", src="spaces") + demo.launch() + """ + if hf_token is None and api_key: + warn_deprecation( + "The `api_key` parameter will be deprecated. " + "Please use the `hf_token` parameter going forward." + ) + hf_token = api_key + return load_blocks_from_repo( + name=name, src=src, hf_token=hf_token, alias=alias, **kwargs + ) + + +def load_blocks_from_repo( + name: str, + src: str | None = None, + hf_token: str | None = None, + alias: str | None = None, + **kwargs, +) -> Blocks: + """Creates and returns a Blocks instance from a Hugging Face model or Space repo.""" + if src is None: + # Separate the repo type (e.g. "model") from repo name (e.g. "google/vit-base-patch16-224") + tokens = name.split("/") + assert ( + len(tokens) > 1 + ), "Either `src` parameter must be provided, or `name` must be formatted as {src}/{repo name}" + src = tokens[0] + name = "/".join(tokens[1:]) + + factory_methods: dict[str, Callable] = { + # for each repo type, we have a method that returns the Interface given the model name & optionally an api_key + "huggingface": from_model, + "models": from_model, + "spaces": from_spaces, + } + assert ( + src.lower() in factory_methods + ), f"parameter: src must be one of {factory_methods.keys()}" + + if hf_token is not None: + if Context.hf_token is not None and Context.hf_token != hf_token: + warnings.warn( + """You are loading a model/Space with a different access token than the one you used to load a previous model/Space. 
This is not recommended, as it may cause unexpected behavior.""" + ) + Context.hf_token = hf_token + + blocks: gradio.Blocks = factory_methods[src](name, hf_token, alias, **kwargs) + return blocks + + +def chatbot_preprocess(text, state): + payload = { + "inputs": {"generated_responses": None, "past_user_inputs": None, "text": text} + } + if state is not None: + payload["inputs"]["generated_responses"] = state["conversation"][ + "generated_responses" + ] + payload["inputs"]["past_user_inputs"] = state["conversation"][ + "past_user_inputs" + ] + + return payload + + +def chatbot_postprocess(response): + response_json = response.json() + chatbot_value = list( + zip( + response_json["conversation"]["past_user_inputs"], + response_json["conversation"]["generated_responses"], + ) + ) + return chatbot_value, response_json + + +def from_model(model_name: str, hf_token: str | None, alias: str | None, **kwargs): + model_url = f"https://huggingface.co/{model_name}" + api_url = f"https://api-inference.huggingface.co/models/{model_name}" + print(f"Fetching model from: {model_url}") + + headers = {"Authorization": f"Bearer {hf_token}"} if hf_token is not None else {} + + # Checking if model exists, and if so, it gets the pipeline + response = requests.request("GET", api_url, headers=headers) + assert ( + response.status_code == 200 + ), f"Could not find model: {model_name}. If it is a private or gated model, please provide your Hugging Face access token (https://huggingface.co/settings/tokens) as the argument for the `api_key` parameter." 
+ p = response.json().get("pipeline_tag") + pipelines = { + "audio-classification": { + # example model: ehcalabres/wav2vec2-lg-xlsr-en-speech-emotion-recognition + "inputs": components.Audio( + source="upload", type="filepath", label="Input", render=False + ), + "outputs": components.Label(label="Class", render=False), + "preprocess": lambda i: to_binary, + "postprocess": lambda r: postprocess_label( + {i["label"].split(", ")[0]: i["score"] for i in r.json()} + ), + }, + "audio-to-audio": { + # example model: facebook/xm_transformer_sm_all-en + "inputs": components.Audio( + source="upload", type="filepath", label="Input", render=False + ), + "outputs": components.Audio(label="Output", render=False), + "preprocess": to_binary, + "postprocess": encode_to_base64, + }, + "automatic-speech-recognition": { + # example model: facebook/wav2vec2-base-960h + "inputs": components.Audio( + source="upload", type="filepath", label="Input", render=False + ), + "outputs": components.Textbox(label="Output", render=False), + "preprocess": to_binary, + "postprocess": lambda r: r.json()["text"], + }, + "conversational": { + "inputs": [components.Textbox(render=False), components.State(render=False)], # type: ignore + "outputs": [components.Chatbot(render=False), components.State(render=False)], # type: ignore + "preprocess": chatbot_preprocess, + "postprocess": chatbot_postprocess, + }, + "feature-extraction": { + # example model: julien-c/distilbert-feature-extraction + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Dataframe(label="Output", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: r.json()[0], + }, + "fill-mask": { + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Label(label="Classification", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: postprocess_label( + {i["token_str"]: i["score"] for i in r.json()} + ), + }, + 
"image-classification": { + # Example: google/vit-base-patch16-224 + "inputs": components.Image( + type="filepath", label="Input Image", render=False + ), + "outputs": components.Label(label="Classification", render=False), + "preprocess": to_binary, + "postprocess": lambda r: postprocess_label( + {i["label"].split(", ")[0]: i["score"] for i in r.json()} + ), + }, + "question-answering": { + # Example: deepset/xlm-roberta-base-squad2 + "inputs": [ + components.Textbox(lines=7, label="Context", render=False), + components.Textbox(label="Question", render=False), + ], + "outputs": [ + components.Textbox(label="Answer", render=False), + components.Label(label="Score", render=False), + ], + "preprocess": lambda c, q: {"inputs": {"context": c, "question": q}}, + "postprocess": lambda r: (r.json()["answer"], {"label": r.json()["score"]}), + }, + "summarization": { + # Example: facebook/bart-large-cnn + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Textbox(label="Summary", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: r.json()[0]["summary_text"], + }, + "text-classification": { + # Example: distilbert-base-uncased-finetuned-sst-2-english + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Label(label="Classification", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: postprocess_label( + {i["label"].split(", ")[0]: i["score"] for i in r.json()[0]} + ), + }, + "text-generation": { + # Example: gpt2 + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Textbox(label="Output", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: r.json()[0]["generated_text"], + }, + "text2text-generation": { + # Example: valhalla/t5-small-qa-qg-hl + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Textbox(label="Generated Text", render=False), + "preprocess": 
lambda x: {"inputs": x}, + "postprocess": lambda r: r.json()[0]["generated_text"], + }, + "translation": { + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Textbox(label="Translation", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: r.json()[0]["translation_text"], + }, + "zero-shot-classification": { + # Example: facebook/bart-large-mnli + "inputs": [ + components.Textbox(label="Input", render=False), + components.Textbox( + label="Possible class names (" "comma-separated)", render=False + ), + components.Checkbox(label="Allow multiple true classes", render=False), + ], + "outputs": components.Label(label="Classification", render=False), + "preprocess": lambda i, c, m: { + "inputs": i, + "parameters": {"candidate_labels": c, "multi_class": m}, + }, + "postprocess": lambda r: postprocess_label( + { + r.json()["labels"][i]: r.json()["scores"][i] + for i in range(len(r.json()["labels"])) + } + ), + }, + "sentence-similarity": { + # Example: sentence-transformers/distilbert-base-nli-stsb-mean-tokens + "inputs": [ + components.Textbox( + value="That is a happy person", + label="Source Sentence", + render=False, + ), + components.Textbox( + lines=7, + placeholder="Separate each sentence by a newline", + label="Sentences to compare to", + render=False, + ), + ], + "outputs": components.Label(label="Classification", render=False), + "preprocess": lambda src, sentences: { + "inputs": { + "source_sentence": src, + "sentences": [s for s in sentences.splitlines() if s != ""], + } + }, + "postprocess": lambda r: postprocess_label( + {f"sentence {i}": v for i, v in enumerate(r.json())} + ), + }, + "text-to-speech": { + # Example: julien-c/ljspeech_tts_train_tacotron2_raw_phn_tacotron_g2p_en_no_space_train + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Audio(label="Audio", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": encode_to_base64, + }, + 
"text-to-image": { + # example model: osanseviero/BigGAN-deep-128 + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Image(label="Output", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": encode_to_base64, + }, + "token-classification": { + # example model: huggingface-course/bert-finetuned-ner + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.HighlightedText(label="Output", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: r, # Handled as a special case in query_huggingface_api() + }, + "document-question-answering": { + # example model: impira/layoutlm-document-qa + "inputs": [ + components.Image(type="filepath", label="Input Document", render=False), + components.Textbox(label="Question", render=False), + ], + "outputs": components.Label(label="Label", render=False), + "preprocess": lambda img, q: { + "inputs": { + "image": extract_base64_data(img), # Extract base64 data + "question": q, + } + }, + "postprocess": lambda r: postprocess_label( + {i["answer"]: i["score"] for i in r.json()} + ), + }, + "visual-question-answering": { + # example model: dandelin/vilt-b32-finetuned-vqa + "inputs": [ + components.Image(type="filepath", label="Input Image", render=False), + components.Textbox(label="Question", render=False), + ], + "outputs": components.Label(label="Label", render=False), + "preprocess": lambda img, q: { + "inputs": { + "image": extract_base64_data(img), + "question": q, + } + }, + "postprocess": lambda r: postprocess_label( + {i["answer"]: i["score"] for i in r.json()} + ), + }, + "image-to-text": { + # example model: Salesforce/blip-image-captioning-base + "inputs": components.Image( + type="filepath", label="Input Image", render=False + ), + "outputs": components.Textbox(label="Generated Text", render=False), + "preprocess": to_binary, + "postprocess": lambda r: r.json()[0]["generated_text"], + }, + } + + if p in 
["tabular-classification", "tabular-regression"]: + example_data = get_tabular_examples(model_name) + col_names, example_data = cols_to_rows(example_data) + example_data = [[example_data]] if example_data else None + + pipelines[p] = { + "inputs": components.Dataframe( + label="Input Rows", + type="pandas", + headers=col_names, + col_count=(len(col_names), "fixed"), + render=False, + ), + "outputs": components.Dataframe( + label="Predictions", type="array", headers=["prediction"], render=False + ), + "preprocess": rows_to_cols, + "postprocess": lambda r: { + "headers": ["prediction"], + "data": [[pred] for pred in json.loads(r.text)], + }, + "examples": example_data, + } + + if p is None or p not in pipelines: + raise ValueError(f"Unsupported pipeline type: {p}") + + pipeline = pipelines[p] + + def query_huggingface_api(*params): + # Convert to a list of input components + data = pipeline["preprocess"](*params) + if isinstance( + data, dict + ): # HF doesn't allow additional parameters for binary files (e.g. 
images or audio files) + data.update({"options": {"wait_for_model": True}}) + data = json.dumps(data) + response = requests.request("POST", api_url, headers=headers, data=data) + if response.status_code != 200: + errors_json = response.json() + errors, warns = "", "" + if errors_json.get("error"): + errors = f", Error: {errors_json.get('error')}" + if errors_json.get("warnings"): + warns = f", Warnings: {errors_json.get('warnings')}" + raise Error( + f"Could not complete request to HuggingFace API, Status Code: {response.status_code}" + + errors + + warns + ) + if ( + p == "token-classification" + ): # Handle as a special case since HF API only returns the named entities and we need the input as well + ner_groups = response.json() + input_string = params[0] + response = utils.format_ner_list(input_string, ner_groups) + output = pipeline["postprocess"](response) + return output + + if alias is None: + query_huggingface_api.__name__ = model_name + else: + query_huggingface_api.__name__ = alias + + interface_info = { + "fn": query_huggingface_api, + "inputs": pipeline["inputs"], + "outputs": pipeline["outputs"], + "title": model_name, + "examples": pipeline.get("examples"), + } + + kwargs = dict(interface_info, **kwargs) + + # So interface doesn't run pre/postprocess + # except for conversational interfaces which + # are stateful + kwargs["_api_mode"] = p != "conversational" + + interface = gradio.Interface(**kwargs) + return interface + + +def from_spaces( + space_name: str, hf_token: str | None, alias: str | None, **kwargs +) -> Blocks: + space_url = f"https://huggingface.co/spaces/{space_name}" + + print(f"Fetching Space from: {space_url}") + + headers = {} + if hf_token is not None: + headers["Authorization"] = f"Bearer {hf_token}" + + iframe_url = ( + requests.get( + f"https://huggingface.co/api/spaces/{space_name}/host", headers=headers + ) + .json() + .get("host") + ) + + if iframe_url is None: + raise ValueError( + f"Could not find Space: {space_name}. 
If it is a private or gated Space, please provide your Hugging Face access token (https://huggingface.co/settings/tokens) as the argument for the `api_key` parameter." + ) + + r = requests.get(iframe_url, headers=headers) + + result = re.search( + r"window.gradio_config = (.*?);[\s]*", r.text + ) # some basic regex to extract the config + try: + config = json.loads(result.group(1)) # type: ignore + except AttributeError as ae: + raise ValueError(f"Could not load the Space: {space_name}") from ae + if "allow_flagging" in config: # Create an Interface for Gradio 2.x Spaces + return from_spaces_interface( + space_name, config, alias, hf_token, iframe_url, **kwargs + ) + else: # Create a Blocks for Gradio 3.x Spaces + if kwargs: + warnings.warn( + "You cannot override parameters for this Space by passing in kwargs. " + "Instead, please load the Space as a function and use it to create a " + "Blocks or Interface locally. You may find this Guide helpful: " + "https://gradio.app/using_blocks_like_functions/" + ) + return from_spaces_blocks(space=space_name, hf_token=hf_token) + + +def from_spaces_blocks(space: str, hf_token: str | None) -> Blocks: + client = Client(space, hf_token=hf_token) + predict_fns = [endpoint._predict_resolve for endpoint in client.endpoints] + return gradio.Blocks.from_config(client.config, predict_fns, client.src) + + +def from_spaces_interface( + model_name: str, + config: dict, + alias: str | None, + hf_token: str | None, + iframe_url: str, + **kwargs, +) -> Interface: + config = streamline_spaces_interface(config) + api_url = f"{iframe_url}/api/predict/" + headers = {"Content-Type": "application/json"} + if hf_token is not None: + headers["Authorization"] = f"Bearer {hf_token}" + + # The function should call the API with preprocessed data + def fn(*data): + data = json.dumps({"data": data}) + response = requests.post(api_url, headers=headers, data=data) + result = json.loads(response.content.decode("utf-8")) + if "error" in result and "429" in 
result["error"]: + raise TooManyRequestsError("Too many requests to the Hugging Face API") + try: + output = result["data"] + except KeyError as ke: + raise KeyError( + f"Could not find 'data' key in response from external Space. Response received: {result}" + ) from ke + if ( + len(config["outputs"]) == 1 + ): # if the fn is supposed to return a single value, pop it + output = output[0] + if len(config["outputs"]) == 1 and isinstance( + output, list + ): # Needed to support Output.Image() returning bounding boxes as well (TODO: handle different versions of gradio since they have slightly different APIs) + output = output[0] + return output + + fn.__name__ = alias if (alias is not None) else model_name + config["fn"] = fn + + kwargs = dict(config, **kwargs) + kwargs["_api_mode"] = True + interface = gradio.Interface(**kwargs) + return interface diff --git a/testbed/gradio-app__gradio/gradio/external_utils.py b/testbed/gradio-app__gradio/gradio/external_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9a6064bd25da68c51ee9b09f3551e2a31fdb253b --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/external_utils.py @@ -0,0 +1,140 @@ +"""Utility function for gradio/external.py""" + +import base64 +import math +import operator +import re +import warnings +from typing import Dict, List, Tuple + +import requests +import yaml + +from gradio import components + +################## +# Helper functions for processing tabular data +################## + + +def get_tabular_examples(model_name: str) -> Dict[str, List[float]]: + readme = requests.get(f"https://huggingface.co/{model_name}/resolve/main/README.md") + if readme.status_code != 200: + warnings.warn(f"Cannot load examples from README for {model_name}", UserWarning) + example_data = {} + else: + yaml_regex = re.search( + "(?:^|[\r\n])---[\n\r]+([\\S\\s]*?)[\n\r]+---([\n\r]|$)", readme.text + ) + if yaml_regex is None: + example_data = {} + else: + example_yaml = next( + 
yaml.safe_load_all(readme.text[: yaml_regex.span()[-1]]) + ) + example_data = example_yaml.get("widget", {}).get("structuredData", {}) + if not example_data: + raise ValueError( + f"No example data found in README.md of {model_name} - Cannot build gradio demo. " + "See the README.md here: https://huggingface.co/scikit-learn/tabular-playground/blob/main/README.md " + "for a reference on how to provide example data to your model." + ) + # replace nan with string NaN for inference API + for data in example_data.values(): + for i, val in enumerate(data): + if isinstance(val, float) and math.isnan(val): + data[i] = "NaN" + return example_data + + +def cols_to_rows( + example_data: Dict[str, List[float]] +) -> Tuple[List[str], List[List[float]]]: + headers = list(example_data.keys()) + n_rows = max(len(example_data[header] or []) for header in headers) + data = [] + for row_index in range(n_rows): + row_data = [] + for header in headers: + col = example_data[header] or [] + if row_index >= len(col): + row_data.append("NaN") + else: + row_data.append(col[row_index]) + data.append(row_data) + return headers, data + + +def rows_to_cols(incoming_data: Dict) -> Dict[str, Dict[str, Dict[str, List[str]]]]: + data_column_wise = {} + for i, header in enumerate(incoming_data["headers"]): + data_column_wise[header] = [str(row[i]) for row in incoming_data["data"]] + return {"inputs": {"data": data_column_wise}} + + +################## +# Helper functions for processing other kinds of data +################## + + +def postprocess_label(scores: Dict) -> Dict: + sorted_pred = sorted(scores.items(), key=operator.itemgetter(1), reverse=True) + return { + "label": sorted_pred[0][0], + "confidences": [ + {"label": pred[0], "confidence": pred[1]} for pred in sorted_pred + ], + } + + +def encode_to_base64(r: requests.Response) -> str: + # Handles the different ways HF API returns the prediction + base64_repr = base64.b64encode(r.content).decode("utf-8") + data_prefix = ";base64," + # Case 1: 
base64 representation already includes data prefix + if data_prefix in base64_repr: + return base64_repr + else: + content_type = r.headers.get("content-type") + # Case 2: the data prefix is a key in the response + if content_type == "application/json": + try: + data = r.json()[0] + content_type = data["content-type"] + base64_repr = data["blob"] + except KeyError as ke: + raise ValueError( + "Cannot determine content type returned by external API." + ) from ke + # Case 3: the data prefix is included in the response headers + else: + pass + new_base64 = f"data:{content_type};base64,{base64_repr}" + return new_base64 + + +################## +# Helper function for cleaning up an Interface loaded from HF Spaces +################## + + +def streamline_spaces_interface(config: Dict) -> Dict: + """Streamlines the interface config dictionary to remove unnecessary keys.""" + config["inputs"] = [ + components.get_component_instance(component) + for component in config["input_components"] + ] + config["outputs"] = [ + components.get_component_instance(component) + for component in config["output_components"] + ] + parameters = { + "article", + "description", + "flagging_options", + "inputs", + "outputs", + "title", + } + config = {k: config[k] for k in parameters} + return config diff --git a/testbed/gradio-app__gradio/gradio/flagging.py b/testbed/gradio-app__gradio/gradio/flagging.py new file mode 100644 index 0000000000000000000000000000000000000000..465982f43dd27de6050079d82ab9885cc94c5457 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/flagging.py @@ -0,0 +1,526 @@ +from __future__ import annotations + +import csv +import datetime +import json +import os +import time +import uuid +from abc import ABC, abstractmethod +from collections import OrderedDict +from pathlib import Path +from typing import TYPE_CHECKING, Any + +import filelock +import huggingface_hub +from gradio_client import utils as client_utils +from gradio_client.documentation import document, 
set_documentation_group + +import gradio as gr +from gradio import utils +from gradio.deprecation import warn_deprecation + +if TYPE_CHECKING: + from gradio.components import IOComponent + +set_documentation_group("flagging") + + +class FlaggingCallback(ABC): + """ + An abstract class for defining the methods that any FlaggingCallback should have. + """ + + @abstractmethod + def setup(self, components: list[IOComponent], flagging_dir: str): + """ + This method should be overridden and ensure that everything is set up correctly for flag(). + This method gets called once at the beginning of the Interface.launch() method. + Parameters: + components: Set of components that will provide flagged data. + flagging_dir: A string, typically containing the path to the directory where the flagging file should be storied (provided as an argument to Interface.__init__()). + """ + pass + + @abstractmethod + def flag( + self, + flag_data: list[Any], + flag_option: str = "", + username: str | None = None, + ) -> int: + """ + This method should be overridden by the FlaggingCallback subclass and may contain optional additional arguments. + This gets called every time the button is pressed. + Parameters: + interface: The Interface object that is being used to launch the flagging interface. + flag_data: The data to be flagged. + flag_option (optional): In the case that flagging_options are provided, the flag option that is being used. + username (optional): The username of the user that is flagging the data, if logged in. + Returns: + (int) The total number of samples that have been flagged. + """ + pass + + +@document() +class SimpleCSVLogger(FlaggingCallback): + """ + A simplified implementation of the FlaggingCallback abstract class + provided for illustrative purposes. Each flagged sample (both the input and output data) + is logged to a CSV file on the machine running the gradio app. 
+ Example: + import gradio as gr + def image_classifier(inp): + return {'cat': 0.3, 'dog': 0.7} + demo = gr.Interface(fn=image_classifier, inputs="image", outputs="label", + flagging_callback=SimpleCSVLogger()) + """ + + def __init__(self): + pass + + def setup(self, components: list[IOComponent], flagging_dir: str | Path): + self.components = components + self.flagging_dir = flagging_dir + os.makedirs(flagging_dir, exist_ok=True) + + def flag( + self, + flag_data: list[Any], + flag_option: str = "", + username: str | None = None, + ) -> int: + flagging_dir = self.flagging_dir + log_filepath = Path(flagging_dir) / "log.csv" + + csv_data = [] + for component, sample in zip(self.components, flag_data): + save_dir = Path( + flagging_dir + ) / client_utils.strip_invalid_filename_characters(component.label or "") + csv_data.append( + component.deserialize( + sample, + save_dir, + None, + ) + ) + + with open(log_filepath, "a", newline="") as csvfile: + writer = csv.writer(csvfile) + writer.writerow(utils.sanitize_list_for_csv(csv_data)) + + with open(log_filepath) as csvfile: + line_count = len(list(csv.reader(csvfile))) - 1 + return line_count + + +@document() +class CSVLogger(FlaggingCallback): + """ + The default implementation of the FlaggingCallback abstract class. Each flagged + sample (both the input and output data) is logged to a CSV file with headers on the machine running the gradio app. 
+ Example: + import gradio as gr + def image_classifier(inp): + return {'cat': 0.3, 'dog': 0.7} + demo = gr.Interface(fn=image_classifier, inputs="image", outputs="label", + flagging_callback=CSVLogger()) + Guides: using-flagging + """ + + def __init__(self): + pass + + def setup( + self, + components: list[IOComponent], + flagging_dir: str | Path, + ): + self.components = components + self.flagging_dir = flagging_dir + os.makedirs(flagging_dir, exist_ok=True) + + def flag( + self, + flag_data: list[Any], + flag_option: str = "", + username: str | None = None, + ) -> int: + flagging_dir = self.flagging_dir + log_filepath = Path(flagging_dir) / "log.csv" + is_new = not Path(log_filepath).exists() + headers = [ + getattr(component, "label", None) or f"component {idx}" + for idx, component in enumerate(self.components) + ] + [ + "flag", + "username", + "timestamp", + ] + + csv_data = [] + for idx, (component, sample) in enumerate(zip(self.components, flag_data)): + save_dir = Path( + flagging_dir + ) / client_utils.strip_invalid_filename_characters( + getattr(component, "label", None) or f"component {idx}" + ) + if utils.is_update(sample): + csv_data.append(str(sample)) + else: + csv_data.append( + component.deserialize(sample, save_dir=save_dir) + if sample is not None + else "" + ) + csv_data.append(flag_option) + csv_data.append(username if username is not None else "") + csv_data.append(str(datetime.datetime.now())) + + with open(log_filepath, "a", newline="", encoding="utf-8") as csvfile: + writer = csv.writer(csvfile) + if is_new: + writer.writerow(utils.sanitize_list_for_csv(headers)) + writer.writerow(utils.sanitize_list_for_csv(csv_data)) + + with open(log_filepath, encoding="utf-8") as csvfile: + line_count = len(list(csv.reader(csvfile))) - 1 + return line_count + + +@document() +class HuggingFaceDatasetSaver(FlaggingCallback): + """ + A callback that saves each flagged sample (both the input and output data) to a HuggingFace dataset. 
+ + Example: + import gradio as gr + hf_writer = gr.HuggingFaceDatasetSaver(HF_API_TOKEN, "image-classification-mistakes") + def image_classifier(inp): + return {'cat': 0.3, 'dog': 0.7} + demo = gr.Interface(fn=image_classifier, inputs="image", outputs="label", + allow_flagging="manual", flagging_callback=hf_writer) + Guides: using-flagging + """ + + def __init__( + self, + hf_token: str, + dataset_name: str, + organization: str | None = None, + private: bool = False, + info_filename: str = "dataset_info.json", + separate_dirs: bool = False, + verbose: bool = True, # silently ignored. TODO: remove it? + ): + """ + Parameters: + hf_token: The HuggingFace token to use to create (and write the flagged sample to) the HuggingFace dataset (defaults to the registered one). + dataset_name: The repo_id of the dataset to save the data to, e.g. "image-classifier-1" or "username/image-classifier-1". + organization: Deprecated argument. Please pass a full dataset id (e.g. 'username/dataset_name') to `dataset_name` instead. + private: Whether the dataset should be private (defaults to False). + info_filename: The name of the file to save the dataset info (defaults to "dataset_infos.json"). + separate_dirs: If True, each flagged item will be saved in a separate directory. This makes the flagging more robust to concurrent editing, but may be less convenient to use. + """ + if organization is not None: + warn_deprecation( + "Parameter `organization` is not used anymore. Please pass a full dataset id (e.g. 'username/dataset_name') to `dataset_name` instead." + ) + self.hf_token = hf_token + self.dataset_id = dataset_name # TODO: rename parameter (but ensure backward compatibility somehow) + self.dataset_private = private + self.info_filename = info_filename + self.separate_dirs = separate_dirs + + def setup(self, components: list[IOComponent], flagging_dir: str): + """ + Params: + flagging_dir (str): local directory where the dataset is cloned, + updated, and pushed from. 
+ """ + # Setup dataset on the Hub + self.dataset_id = huggingface_hub.create_repo( + repo_id=self.dataset_id, + token=self.hf_token, + private=self.dataset_private, + repo_type="dataset", + exist_ok=True, + ).repo_id + path_glob = "**/*.jsonl" if self.separate_dirs else "data.csv" + huggingface_hub.metadata_update( + repo_id=self.dataset_id, + repo_type="dataset", + metadata={ + "configs": [ + { + "config_name": "default", + "data_files": [{"split": "train", "path": path_glob}], + } + ] + }, + overwrite=True, + token=self.hf_token, + ) + + # Setup flagging dir + self.components = components + self.dataset_dir = ( + Path(flagging_dir).absolute() / self.dataset_id.split("/")[-1] + ) + self.dataset_dir.mkdir(parents=True, exist_ok=True) + self.infos_file = self.dataset_dir / self.info_filename + + # Download remote files to local + remote_files = [self.info_filename] + if not self.separate_dirs: + # No separate dirs => means all data is in the same CSV file => download it to get its current content + remote_files.append("data.csv") + + for filename in remote_files: + try: + huggingface_hub.hf_hub_download( + repo_id=self.dataset_id, + repo_type="dataset", + filename=filename, + local_dir=self.dataset_dir, + token=self.hf_token, + ) + except huggingface_hub.utils.EntryNotFoundError: + pass + + def flag( + self, + flag_data: list[Any], + flag_option: str = "", + username: str | None = None, + ) -> int: + if self.separate_dirs: + # JSONL files to support dataset preview on the Hub + unique_id = str(uuid.uuid4()) + components_dir = self.dataset_dir / unique_id + data_file = components_dir / "metadata.jsonl" + path_in_repo = unique_id # upload in sub folder (safer for concurrency) + else: + # Unique CSV file + components_dir = self.dataset_dir + data_file = components_dir / "data.csv" + path_in_repo = None # upload at root level + + return self._flag_in_dir( + data_file=data_file, + components_dir=components_dir, + path_in_repo=path_in_repo, + flag_data=flag_data, + 
flag_option=flag_option, + username=username or "", + ) + + def _flag_in_dir( + self, + data_file: Path, + components_dir: Path, + path_in_repo: str | None, + flag_data: list[Any], + flag_option: str = "", + username: str = "", + ) -> int: + # Deserialize components (write images/audio to files) + features, row = self._deserialize_components( + components_dir, flag_data, flag_option, username + ) + + # Write generic info to dataset_infos.json + upload + with filelock.FileLock(str(self.infos_file) + ".lock"): + if not self.infos_file.exists(): + self.infos_file.write_text( + json.dumps({"flagged": {"features": features}}) + ) + + huggingface_hub.upload_file( + repo_id=self.dataset_id, + repo_type="dataset", + token=self.hf_token, + path_in_repo=self.infos_file.name, + path_or_fileobj=self.infos_file, + ) + + headers = list(features.keys()) + + if not self.separate_dirs: + with filelock.FileLock(components_dir / ".lock"): + sample_nb = self._save_as_csv(data_file, headers=headers, row=row) + sample_name = str(sample_nb) + huggingface_hub.upload_folder( + repo_id=self.dataset_id, + repo_type="dataset", + commit_message=f"Flagged sample #{sample_name}", + path_in_repo=path_in_repo, + ignore_patterns="*.lock", + folder_path=components_dir, + token=self.hf_token, + ) + else: + sample_name = self._save_as_jsonl(data_file, headers=headers, row=row) + sample_nb = len( + [path for path in self.dataset_dir.iterdir() if path.is_dir()] + ) + huggingface_hub.upload_folder( + repo_id=self.dataset_id, + repo_type="dataset", + commit_message=f"Flagged sample #{sample_name}", + path_in_repo=path_in_repo, + ignore_patterns="*.lock", + folder_path=components_dir, + token=self.hf_token, + ) + + return sample_nb + + @staticmethod + def _save_as_csv(data_file: Path, headers: list[str], row: list[Any]) -> int: + """Save data as CSV and return the sample name (row number).""" + is_new = not data_file.exists() + + with data_file.open("a", newline="", encoding="utf-8") as csvfile: + writer = 
csv.writer(csvfile) + + # Write CSV headers if new file + if is_new: + writer.writerow(utils.sanitize_list_for_csv(headers)) + + # Write CSV row for flagged sample + writer.writerow(utils.sanitize_list_for_csv(row)) + + with data_file.open(encoding="utf-8") as csvfile: + return sum(1 for _ in csv.reader(csvfile)) - 1 + + @staticmethod + def _save_as_jsonl(data_file: Path, headers: list[str], row: list[Any]) -> str: + """Save data as JSONL and return the sample name (uuid).""" + Path.mkdir(data_file.parent, parents=True, exist_ok=True) + with open(data_file, "w") as f: + json.dump(dict(zip(headers, row)), f) + return data_file.parent.name + + def _deserialize_components( + self, + data_dir: Path, + flag_data: list[Any], + flag_option: str = "", + username: str = "", + ) -> tuple[dict[Any, Any], list[Any]]: + """Deserialize components and return the corresponding row for the flagged sample. + + Images/audio are saved to disk as individual files. + """ + # Components that can have a preview on dataset repos + file_preview_types = {gr.Audio: "Audio", gr.Image: "Image"} + + # Generate the row corresponding to the flagged sample + features = OrderedDict() + row = [] + for component, sample in zip(self.components, flag_data): + # Get deserialized object (will save sample to disk if applicable -file, audio, image,...-) + label = component.label or "" + save_dir = data_dir / client_utils.strip_invalid_filename_characters(label) + deserialized = component.deserialize(sample, save_dir, None) + + # Add deserialized object to row + features[label] = {"dtype": "string", "_type": "Value"} + try: + assert Path(deserialized).exists() + row.append(str(Path(deserialized).relative_to(self.dataset_dir))) + except (AssertionError, TypeError, ValueError): + deserialized = "" if deserialized is None else str(deserialized) + row.append(deserialized) + + # If component is eligible for a preview, add the URL of the file + # Be mindful that images and audio can be None + if 
isinstance(component, tuple(file_preview_types)): # type: ignore + for _component, _type in file_preview_types.items(): + if isinstance(component, _component): + features[label + " file"] = {"_type": _type} + break + if deserialized: + path_in_repo = str( # returned filepath is absolute, we want it relative to compute URL + Path(deserialized).relative_to(self.dataset_dir) + ).replace( + "\\", "/" + ) + row.append( + huggingface_hub.hf_hub_url( + repo_id=self.dataset_id, + filename=path_in_repo, + repo_type="dataset", + ) + ) + else: + row.append("") + features["flag"] = {"dtype": "string", "_type": "Value"} + features["username"] = {"dtype": "string", "_type": "Value"} + row.append(flag_option) + row.append(username) + return features, row + + +class HuggingFaceDatasetJSONSaver(HuggingFaceDatasetSaver): + def __init__( + self, + hf_token: str, + dataset_name: str, + organization: str | None = None, + private: bool = False, + info_filename: str = "dataset_info.json", + verbose: bool = True, # silently ignored. TODO: remove it? + ): + warn_deprecation( + "Callback `HuggingFaceDatasetJSONSaver` is deprecated in favor of using" + " `HuggingFaceDatasetSaver` and passing `separate_dirs=True` as parameter." + ) + super().__init__( + hf_token=hf_token, + dataset_name=dataset_name, + organization=organization, + private=private, + info_filename=info_filename, + separate_dirs=True, + ) + + +class FlagMethod: + """ + Helper class that contains the flagging options and calls the flagging method. Also + provides visual feedback to the user when flag is clicked. 
+ """ + + def __init__( + self, + flagging_callback: FlaggingCallback, + label: str, + value: str, + visual_feedback: bool = True, + ): + self.flagging_callback = flagging_callback + self.label = label + self.value = value + self.__name__ = "Flag" + self.visual_feedback = visual_feedback + + def __call__(self, request: gr.Request, *flag_data): + try: + self.flagging_callback.flag( + list(flag_data), flag_option=self.value, username=request.username + ) + except Exception as e: + print(f"Error while flagging: {e}") + if self.visual_feedback: + return "Error!" + if not self.visual_feedback: + return + time.sleep(0.8) # to provide enough time for the user to observe button change + return self.reset() + + def reset(self): + return gr.Button.update(value=self.label, interactive=True) diff --git a/testbed/gradio-app__gradio/gradio/helpers.py b/testbed/gradio-app__gradio/gradio/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..87ddab00bbd0f71586976454ed687d4545338388 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/helpers.py @@ -0,0 +1,1159 @@ +""" +Defines helper methods useful for loading and caching Interface examples. +""" +from __future__ import annotations + +import ast +import csv +import inspect +import os +import shutil +import subprocess +import tempfile +import threading +import warnings +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Iterable, Literal, Optional + +import matplotlib.pyplot as plt +import numpy as np +import PIL +import PIL.Image +from gradio_client import utils as client_utils +from gradio_client.documentation import document, set_documentation_group +from matplotlib import animation + +from gradio import components, oauth, processing_utils, routes, utils +from gradio.context import Context +from gradio.exceptions import Error +from gradio.flagging import CSVLogger + +if TYPE_CHECKING: # Only import for type checking (to avoid circular imports). 
+ from gradio.blocks import Block + from gradio.components import IOComponent + +CACHED_FOLDER = "gradio_cached_examples" +LOG_FILE = "log.csv" + +set_documentation_group("helpers") + + +def create_examples( + examples: list[Any] | list[list[Any]] | str, + inputs: IOComponent | list[IOComponent], + outputs: IOComponent | list[IOComponent] | None = None, + fn: Callable | None = None, + cache_examples: bool = False, + examples_per_page: int = 10, + _api_mode: bool = False, + label: str | None = None, + elem_id: str | None = None, + run_on_click: bool = False, + preprocess: bool = True, + postprocess: bool = True, + api_name: str | None | Literal[False] = False, + batch: bool = False, +): + """Top-level synchronous function that creates Examples. Provided for backwards compatibility, i.e. so that gr.Examples(...) can be used to create the Examples component.""" + examples_obj = Examples( + examples=examples, + inputs=inputs, + outputs=outputs, + fn=fn, + cache_examples=cache_examples, + examples_per_page=examples_per_page, + _api_mode=_api_mode, + label=label, + elem_id=elem_id, + run_on_click=run_on_click, + preprocess=preprocess, + postprocess=postprocess, + api_name=api_name, + batch=batch, + _initiated_directly=False, + ) + client_utils.synchronize_async(examples_obj.create) + return examples_obj + + +@document() +class Examples: + """ + This class is a wrapper over the Dataset component and can be used to create Examples + for Blocks / Interfaces. Populates the Dataset component with examples and + assigns event listener so that clicking on an example populates the input/output + components. Optionally handles example caching for fast inference. 
+ + Demos: blocks_inputs, fake_gan + Guides: more-on-examples-and-flagging, using-hugging-face-integrations, image-classification-in-pytorch, image-classification-in-tensorflow, image-classification-with-vision-transformers, create-your-own-friends-with-a-gan + """ + + def __init__( + self, + examples: list[Any] | list[list[Any]] | str, + inputs: IOComponent | list[IOComponent], + outputs: IOComponent | list[IOComponent] | None = None, + fn: Callable | None = None, + cache_examples: bool = False, + examples_per_page: int = 10, + _api_mode: bool = False, + label: str | None = "Examples", + elem_id: str | None = None, + run_on_click: bool = False, + preprocess: bool = True, + postprocess: bool = True, + api_name: str | None | Literal[False] = False, + batch: bool = False, + _initiated_directly: bool = True, + ): + """ + Parameters: + examples: example inputs that can be clicked to populate specific components. Should be nested list, in which the outer list consists of samples and each inner list consists of an input corresponding to each input component. A string path to a directory of examples can also be provided but it should be within the directory with the python file running the gradio app. If there are multiple input components and a directory is provided, a log.csv file must be present in the directory to link corresponding inputs. + inputs: the component or list of components corresponding to the examples + outputs: optionally, provide the component or list of components corresponding to the output of the examples. Required if `cache` is True. + fn: optionally, provide the function to run to generate the outputs corresponding to the examples. Required if `cache` is True. + cache_examples: if True, caches examples for fast runtime. If True, then `fn` and `outputs` must be provided. If `fn` is a generator function, then the last yielded value will be used as the output. + examples_per_page: how many examples to show per page. 
+ label: the label to use for the examples component (by default, "Examples") + elem_id: an optional string that is assigned as the id of this component in the HTML DOM. + run_on_click: if cache_examples is False, clicking on an example does not run the function when an example is clicked. Set this to True to run the function when an example is clicked. Has no effect if cache_examples is True. + preprocess: if True, preprocesses the example input before running the prediction function and caching the output. Only applies if cache_examples is True. + postprocess: if True, postprocesses the example output after running the prediction function and before caching. Only applies if cache_examples is True. + api_name: Defines how the event associated with clicking on the examples appears in the API docs. Can be a string, None, or False. If False (default), the endpoint will not be exposed in the api docs. If set to None, the endpoint will be exposed in the api docs as an unnamed endpoint, although this behavior will be changed in Gradio 4.0. If set to a string, the endpoint will be exposed in the api docs with the given name. + batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. Used only if cache_examples is True. + """ + if _initiated_directly: + warnings.warn( + "Please use gr.Examples(...) instead of gr.examples.Examples(...) 
to create the Examples.", + ) + + if cache_examples and (fn is None or outputs is None): + raise ValueError("If caching examples, `fn` and `outputs` must be provided") + + if not isinstance(inputs, list): + inputs = [inputs] + if outputs and not isinstance(outputs, list): + outputs = [outputs] + + working_directory = Path().absolute() + + if examples is None: + raise ValueError("The parameter `examples` cannot be None") + elif isinstance(examples, list) and ( + len(examples) == 0 or isinstance(examples[0], list) + ): + pass + elif ( + isinstance(examples, list) and len(inputs) == 1 + ): # If there is only one input component, examples can be provided as a regular list instead of a list of lists + examples = [[e] for e in examples] + elif isinstance(examples, str): + if not Path(examples).exists(): + raise FileNotFoundError( + f"Could not find examples directory: {examples}" + ) + working_directory = examples + if not (Path(examples) / LOG_FILE).exists(): + if len(inputs) == 1: + examples = [[e] for e in os.listdir(examples)] + else: + raise FileNotFoundError( + "Could not find log file (required for multiple inputs): " + + LOG_FILE + ) + else: + with open(Path(examples) / LOG_FILE) as logs: + examples = list(csv.reader(logs)) + examples = [ + examples[i][: len(inputs)] for i in range(1, len(examples)) + ] # remove header and unnecessary columns + + else: + raise ValueError( + "The parameter `examples` must either be a string directory or a list" + "(if there is only 1 input component) or (more generally), a nested " + "list, where each sublist represents a set of inputs." + ) + + input_has_examples = [False] * len(inputs) + for example in examples: + for idx, example_for_input in enumerate(example): + if example_for_input is not None: + try: + input_has_examples[idx] = True + except IndexError: + pass # If there are more example components than inputs, ignore. This can sometimes be intentional (e.g. 
loading from a log file where outputs and timestamps are also logged) + + inputs_with_examples = [ + inp for (inp, keep) in zip(inputs, input_has_examples) if keep + ] + non_none_examples = [ + [ex for (ex, keep) in zip(example, input_has_examples) if keep] + for example in examples + ] + + self.examples = examples + self.non_none_examples = non_none_examples + self.inputs = inputs + self.inputs_with_examples = inputs_with_examples + self.outputs = outputs or [] + self.fn = fn + self.cache_examples = cache_examples + self._api_mode = _api_mode + self.preprocess = preprocess + self.postprocess = postprocess + self.api_name = api_name + self.batch = batch + + with utils.set_directory(working_directory): + self.processed_examples = [ + [ + component.postprocess(sample) + for component, sample in zip(inputs, example) + ] + for example in examples + ] + self.non_none_processed_examples = [ + [ex for (ex, keep) in zip(example, input_has_examples) if keep] + for example in self.processed_examples + ] + if cache_examples: + for example in self.examples: + if len([ex for ex in example if ex is not None]) != len(self.inputs): + warnings.warn( + "Examples are being cached but not all input components have " + "example values. This may result in an exception being thrown by " + "your function. If you do get an error while caching examples, make " + "sure all of your inputs have example values for all of your examples " + "or you provide default values for those particular parameters in your function." 
+ ) + break + + from gradio import components + + with utils.set_directory(working_directory): + self.dataset = components.Dataset( + components=inputs_with_examples, + samples=non_none_examples, + type="index", + label=label, + samples_per_page=examples_per_page, + elem_id=elem_id, + ) + + self.cached_folder = Path(CACHED_FOLDER) / str(self.dataset._id) + self.cached_file = Path(self.cached_folder) / "log.csv" + self.cache_examples = cache_examples + self.run_on_click = run_on_click + + async def create(self) -> None: + """Caches the examples if self.cache_examples is True and creates the Dataset + component to hold the examples""" + + async def load_example(example_id): + processed_example = self.non_none_processed_examples[example_id] + return utils.resolve_singleton(processed_example) + + if Context.root_block: + self.load_input_event = self.dataset.click( + load_example, + inputs=[self.dataset], + outputs=self.inputs_with_examples, # type: ignore + show_progress="hidden", + postprocess=False, + queue=False, + api_name=self.api_name, # type: ignore + ) + if self.run_on_click and not self.cache_examples: + if self.fn is None: + raise ValueError("Cannot run_on_click if no function is provided") + self.load_input_event.then( + self.fn, + inputs=self.inputs, # type: ignore + outputs=self.outputs, # type: ignore + ) + + if self.cache_examples: + await self.cache() + + async def cache(self) -> None: + """ + Caches all of the examples so that their predictions can be shown immediately. + """ + if Context.root_block is None: + raise ValueError("Cannot cache examples if not in a Blocks context") + if Path(self.cached_file).exists(): + print( + f"Using cache from '{utils.abspath(self.cached_folder)}' directory. 
If method or examples have changed since last caching, delete this folder to clear cache.\n" + ) + else: + print(f"Caching examples at: '{utils.abspath(self.cached_folder)}'") + cache_logger = CSVLogger() + + generated_values = [] + if inspect.isgeneratorfunction(self.fn): + + def get_final_item(*args): # type: ignore + x = None + generated_values.clear() + for x in self.fn(*args): # noqa: B007 # type: ignore + generated_values.append(x) + return x + + fn = get_final_item + elif inspect.isasyncgenfunction(self.fn): + + async def get_final_item(*args): + x = None + generated_values.clear() + async for x in self.fn(*args): # noqa: B007 # type: ignore + generated_values.append(x) + return x + + fn = get_final_item + else: + fn = self.fn + + # create a fake dependency to process the examples and get the predictions + from gradio.events import EventListenerMethod + + dependency, fn_index = Context.root_block.set_event_trigger( + [EventListenerMethod(Context.root_block, "load")], + fn=fn, + inputs=self.inputs_with_examples, # type: ignore + outputs=self.outputs, # type: ignore + preprocess=self.preprocess and not self._api_mode, + postprocess=self.postprocess and not self._api_mode, + batch=self.batch, + ) + + assert self.outputs is not None + cache_logger.setup(self.outputs, self.cached_folder) + for example_id, _ in enumerate(self.examples): + print(f"Caching example {example_id + 1}/{len(self.examples)}") + processed_input = self.processed_examples[example_id] + if self.batch: + processed_input = [[value] for value in processed_input] + with utils.MatplotlibBackendMananger(): + prediction = await Context.root_block.process_api( + fn_index=fn_index, + inputs=processed_input, + request=None, + ) + output = prediction["data"] + if len(generated_values): + output = merge_generated_values_into_output( + self.outputs, generated_values, output + ) + + if self.batch: + output = [value[0] for value in output] + cache_logger.flag(output) + # Remove the "fake_event" to prevent 
bugs in loading interfaces from spaces + Context.root_block.dependencies.remove(dependency) + Context.root_block.fns.pop(fn_index) + + # Remove the original load_input_event and replace it with one that + # also populates the input. We do it this way to to allow the cache() + # method to be called independently of the create() method + index = Context.root_block.dependencies.index(self.load_input_event) + Context.root_block.dependencies.pop(index) + Context.root_block.fns.pop(index) + + async def load_example(example_id): + processed_example = self.non_none_processed_examples[ + example_id + ] + await self.load_from_cache(example_id) + return utils.resolve_singleton(processed_example) + + self.load_input_event = self.dataset.click( + load_example, + inputs=[self.dataset], + outputs=self.inputs_with_examples + self.outputs, # type: ignore + show_progress="hidden", + postprocess=False, + queue=False, + api_name=self.api_name, # type: ignore + ) + + async def load_from_cache(self, example_id: int) -> list[Any]: + """Loads a particular cached example for the interface. + Parameters: + example_id: The id of the example to process (zero-indexed). 
+ """ + with open(self.cached_file, encoding="utf-8") as cache: + examples = list(csv.reader(cache)) + example = examples[example_id + 1] # +1 to adjust for header + output = [] + assert self.outputs is not None + for component, value in zip(self.outputs, example): + value_to_use = value + try: + value_as_dict = ast.literal_eval(value) + # File components that output multiple files get saved as a python list + # need to pass the parsed list to serialize + # TODO: Better file serialization in 4.0 + if isinstance(value_as_dict, list) and isinstance( + component, components.File + ): + value_to_use = value_as_dict + assert utils.is_update(value_as_dict) + output.append(value_as_dict) + except (ValueError, TypeError, SyntaxError, AssertionError): + output.append( + component.serialize( + value_to_use, self.cached_folder, allow_links=True + ) + ) + return output + + +def merge_generated_values_into_output( + components: list[IOComponent], generated_values: list, output: list +): + from gradio.events import StreamableOutput + + for output_index, output_component in enumerate(components): + if ( + isinstance(output_component, StreamableOutput) + and output_component.streaming + ): + binary_chunks = [] + for i, chunk in enumerate(generated_values): + if len(components) > 1: + chunk = chunk[output_index] + processed_chunk = output_component.postprocess(chunk) + binary_chunks.append( + output_component.stream_output(processed_chunk, "", i == 0)[0] + ) + binary_data = b"".join(binary_chunks) + tempdir = os.environ.get("GRADIO_TEMP_DIR") or str( + Path(tempfile.gettempdir()) / "gradio" + ) + os.makedirs(tempdir, exist_ok=True) + temp_file = tempfile.NamedTemporaryFile(dir=tempdir, delete=False) + with open(temp_file.name, "wb") as f: + f.write(binary_data) + + output[output_index] = { + "name": temp_file.name, + "is_file": True, + "data": None, + } + + return output + + +class TrackedIterable: + def __init__( + self, + iterable: Iterable | None, + index: int | None, + length: 
int | None, + desc: str | None, + unit: str | None, + _tqdm=None, + progress: float | None = None, + ) -> None: + self.iterable = iterable + self.index = index + self.length = length + self.desc = desc + self.unit = unit + self._tqdm = _tqdm + self.progress = progress + + +@document("__call__", "tqdm") +class Progress(Iterable): + """ + The Progress class provides a custom progress tracker that is used in a function signature. + To attach a Progress tracker to a function, simply add a parameter right after the input parameters that has a default value set to a `gradio.Progress()` instance. + The Progress tracker can then be updated in the function by calling the Progress object or using the `tqdm` method on an Iterable. + The Progress tracker is currently only available with `queue()`. + Example: + import gradio as gr + import time + def my_function(x, progress=gr.Progress()): + progress(0, desc="Starting...") + time.sleep(1) + for i in progress.tqdm(range(100)): + time.sleep(0.1) + return x + gr.Interface(my_function, gr.Textbox(), gr.Textbox()).queue().launch() + Demos: progress + """ + + def __init__( + self, + track_tqdm: bool = False, + _callback: Callable | None = None, # for internal use only + _event_id: str | None = None, + ): + """ + Parameters: + track_tqdm: If True, the Progress object will track any tqdm.tqdm iterations with the tqdm library in the function. + """ + self.track_tqdm = track_tqdm + self._callback = _callback + self._event_id = _event_id + self.iterables: list[TrackedIterable] = [] + + def __len__(self): + return self.iterables[-1].length + + def __iter__(self): + return self + + def __next__(self): + """ + Updates progress tracker with next item in iterable. 
+ """ + if self._callback: + current_iterable = self.iterables[-1] + while ( + not hasattr(current_iterable.iterable, "__next__") + and len(self.iterables) > 0 + ): + current_iterable = self.iterables.pop() + self._callback( + event_id=self._event_id, + iterables=self.iterables, + ) + assert current_iterable.index is not None, "Index not set." + current_iterable.index += 1 + try: + return next(current_iterable.iterable) # type: ignore + except StopIteration: + self.iterables.pop() + raise + else: + return self + + def __call__( + self, + progress: float | tuple[int, int | None] | None, + desc: str | None = None, + total: int | None = None, + unit: str = "steps", + _tqdm=None, + ): + """ + Updates progress tracker with progress and message text. + Parameters: + progress: If float, should be between 0 and 1 representing completion. If Tuple, first number represents steps completed, and second value represents total steps or None if unknown. If None, hides progress bar. + desc: description to display. + total: estimated total number of steps. + unit: unit of iterations. + """ + if self._callback: + if isinstance(progress, tuple): + index, total = progress + progress = None + else: + index = None + self._callback( + event_id=self._event_id, + iterables=self.iterables + + [TrackedIterable(None, index, total, desc, unit, _tqdm, progress)], + ) + else: + return progress + + def tqdm( + self, + iterable: Iterable | None, + desc: str | None = None, + total: int | None = None, + unit: str = "steps", + _tqdm=None, + ): + """ + Attaches progress tracker to iterable, like tqdm. + Parameters: + iterable: iterable to attach progress tracker to. + desc: description to display. + total: estimated total number of steps. + unit: unit of iterations. 
+ """ + if self._callback: + if iterable is None: + new_iterable = TrackedIterable(None, 0, total, desc, unit, _tqdm) + self.iterables.append(new_iterable) + self._callback(event_id=self._event_id, iterables=self.iterables) + return self + length = len(iterable) if hasattr(iterable, "__len__") else None # type: ignore + self.iterables.append( + TrackedIterable(iter(iterable), 0, length, desc, unit, _tqdm) + ) + return self + + def update(self, n=1): + """ + Increases latest iterable with specified number of steps. + Parameters: + n: number of steps completed. + """ + if self._callback and len(self.iterables) > 0: + current_iterable = self.iterables[-1] + assert current_iterable.index is not None, "Index not set." + current_iterable.index += n + self._callback( + event_id=self._event_id, + iterables=self.iterables, + ) + else: + return + + def close(self, _tqdm): + """ + Removes iterable with given _tqdm. + """ + if self._callback: + for i in range(len(self.iterables)): + if id(self.iterables[i]._tqdm) == id(_tqdm): + self.iterables.pop(i) + break + self._callback( + event_id=self._event_id, + iterables=self.iterables, + ) + else: + return + + +def create_tracker(root_blocks, event_id, fn, track_tqdm): + progress = Progress(_callback=root_blocks._queue.set_progress, _event_id=event_id) + if not track_tqdm: + return progress, fn + + try: + _tqdm = __import__("tqdm") + except ModuleNotFoundError: + return progress, fn + if not hasattr(root_blocks, "_progress_tracker_per_thread"): + root_blocks._progress_tracker_per_thread = {} + + def init_tqdm( + self, iterable=None, desc=None, total=None, unit="steps", *args, **kwargs + ): + self._progress = root_blocks._progress_tracker_per_thread.get( + threading.get_ident() + ) + if self._progress is not None: + self._progress.event_id = event_id + self._progress.tqdm(iterable, desc, total, unit, _tqdm=self) + kwargs["file"] = open(os.devnull, "w") # noqa: SIM115 + self.__init__orig__(iterable, desc, total, *args, unit=unit, 
**kwargs) + + def iter_tqdm(self): + if self._progress is not None: + return self._progress + else: + return self.__iter__orig__() + + def update_tqdm(self, n=1): + if self._progress is not None: + self._progress.update(n) + return self.__update__orig__(n) + + def close_tqdm(self): + if self._progress is not None: + self._progress.close(self) + return self.__close__orig__() + + def exit_tqdm(self, exc_type, exc_value, traceback): + if self._progress is not None: + self._progress.close(self) + return self.__exit__orig__(exc_type, exc_value, traceback) + + if not hasattr(_tqdm.tqdm, "__init__orig__"): + _tqdm.tqdm.__init__orig__ = _tqdm.tqdm.__init__ + _tqdm.tqdm.__init__ = init_tqdm + if not hasattr(_tqdm.tqdm, "__update__orig__"): + _tqdm.tqdm.__update__orig__ = _tqdm.tqdm.update + _tqdm.tqdm.update = update_tqdm + if not hasattr(_tqdm.tqdm, "__close__orig__"): + _tqdm.tqdm.__close__orig__ = _tqdm.tqdm.close + _tqdm.tqdm.close = close_tqdm + if not hasattr(_tqdm.tqdm, "__exit__orig__"): + _tqdm.tqdm.__exit__orig__ = _tqdm.tqdm.__exit__ + _tqdm.tqdm.__exit__ = exit_tqdm + if not hasattr(_tqdm.tqdm, "__iter__orig__"): + _tqdm.tqdm.__iter__orig__ = _tqdm.tqdm.__iter__ + _tqdm.tqdm.__iter__ = iter_tqdm + if hasattr(_tqdm, "auto") and hasattr(_tqdm.auto, "tqdm"): + _tqdm.auto.tqdm = _tqdm.tqdm + + def before_fn(): + thread_id = threading.get_ident() + root_blocks._progress_tracker_per_thread[thread_id] = progress + + def after_fn(): + thread_id = threading.get_ident() + del root_blocks._progress_tracker_per_thread[thread_id] + + tracked_fn = utils.function_wrapper(fn, before_fn=before_fn, after_fn=after_fn) + + return progress, tracked_fn + + +def special_args( + fn: Callable, + inputs: list[Any] | None = None, + request: routes.Request | None = None, + event_data: EventData | None = None, +): + """ + Checks if function has special arguments Request or EventData (via annotation) or Progress (via default value). 
+ If inputs is provided, these values will be loaded into the inputs array. + Parameters: + fn: function to check. + inputs: array to load special arguments into. + request: request to load into inputs. + event_data: event-related data to load into inputs. + Returns: + updated inputs, progress index, event data index. + """ + signature = inspect.signature(fn) + type_hints = utils.get_type_hints(fn) + positional_args = [] + for param in signature.parameters.values(): + if param.kind not in (param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD): + break + positional_args.append(param) + progress_index = None + event_data_index = None + for i, param in enumerate(positional_args): + type_hint = type_hints.get(param.name) + if isinstance(param.default, Progress): + progress_index = i + if inputs is not None: + inputs.insert(i, param.default) + elif type_hint == routes.Request: + if inputs is not None: + inputs.insert(i, request) + elif ( + type_hint == Optional[oauth.OAuthProfile] + or type_hint == oauth.OAuthProfile + # Note: "OAuthProfile | None" is equals to Optional[OAuthProfile] in Python + # => it is automatically handled as well by the above condition + # (adding explicit "OAuthProfile | None" would break in Python3.9) + ): + if inputs is not None: + # Retrieve session from gr.Request, if it exists (i.e. if user is logged in) + session = ( + # request.session (if fastapi.Request obj i.e. direct call) + getattr(request, "session", {}) + or + # or request.request.session (if gr.Request obj i.e. websocket call) + getattr(getattr(request, "request", None), "session", {}) + ) + oauth_profile = ( + session["oauth_profile"] if "oauth_profile" in session else None + ) + if type_hint == oauth.OAuthProfile and oauth_profile is None: + raise Error( + "This action requires a logged in user. Please sign in and retry." 
+ ) + inputs.insert(i, oauth_profile) + elif ( + type_hint + and inspect.isclass(type_hint) + and issubclass(type_hint, EventData) + ): + event_data_index = i + if inputs is not None and event_data is not None: + inputs.insert(i, type_hint(event_data.target, event_data._data)) + elif ( + param.default is not param.empty and inputs is not None and len(inputs) <= i + ): + inputs.insert(i, param.default) + if inputs is not None: + while len(inputs) < len(positional_args): + i = len(inputs) + param = positional_args[i] + if param.default == param.empty: + warnings.warn("Unexpected argument. Filling with None.") + inputs.append(None) + else: + inputs.append(param.default) + return inputs or [], progress_index, event_data_index + + +def update(**kwargs) -> dict: + """ + DEPRECATED. Updates component properties. When a function passed into a Gradio Interface or a Blocks events returns a typical value, it updates the value of the output component. But it is also possible to update the properties of an output component (such as the number of lines of a `Textbox` or the visibility of an `Image`) by returning the component's `update()` function, which takes as parameters any of the constructor parameters for that component. + This is a shorthand for using the update method on a component. + For example, rather than using gr.Number.update(...) you can just use gr.update(...). + Note that your editor's autocompletion will suggest proper parameters + if you use the update method on the component. + Demos: blocks_essay, blocks_update, blocks_essay_update + + Parameters: + kwargs: Key-word arguments used to update the component's properties. 
+ Example: + # Blocks Example + import gradio as gr + with gr.Blocks() as demo: + radio = gr.Radio([1, 2, 4], label="Set the value of the number") + number = gr.Number(value=2, interactive=True) + radio.change(fn=lambda value: gr.update(value=value), inputs=radio, outputs=number) + demo.launch() + + # Interface example + import gradio as gr + def change_textbox(choice): + if choice == "short": + return gr.Textbox.update(lines=2, visible=True) + elif choice == "long": + return gr.Textbox.update(lines=8, visible=True) + else: + return gr.Textbox.update(visible=False) + gr.Interface( + change_textbox, + gr.Radio( + ["short", "long", "none"], label="What kind of essay would you like to write?" + ), + gr.Textbox(lines=2), + live=True, + ).launch() + """ + warnings.warn( + "Using the update method is deprecated. Simply return a new object instead, e.g. `return gr.Textbox(...)` instead of `return gr.update(...)" + ) + kwargs["__type__"] = "generic_update" + return kwargs + + +def skip() -> dict: + return update() + + +@document() +def make_waveform( + audio: str | tuple[int, np.ndarray], + *, + bg_color: str = "#f3f4f6", + bg_image: str | None = None, + fg_alpha: float = 0.75, + bars_color: str | tuple[str, str] = ("#fbbf24", "#ea580c"), + bar_count: int = 50, + bar_width: float = 0.6, + animate: bool = False, +) -> str: + """ + Generates a waveform video from an audio file. Useful for creating an easy to share audio visualization. The output should be passed into a `gr.Video` component. + Parameters: + audio: Audio file path or tuple of (sample_rate, audio_data) + bg_color: Background color of waveform (ignored if bg_image is provided) + bg_image: Background image of waveform + fg_alpha: Opacity of foreground waveform + bars_color: Color of waveform bars. Can be a single color or a tuple of (start_color, end_color) of gradient + bar_count: Number of bars in waveform + bar_width: Width of bars in waveform. 1 represents full width, 0.5 represents half width, etc. 
+ animate: If true, the audio waveform overlay will be animated, if false, it will be static. + Returns: + A filepath to the output video in mp4 format. + """ + if isinstance(audio, str): + audio_file = audio + audio = processing_utils.audio_from_file(audio) + else: + tmp_wav = tempfile.NamedTemporaryFile(suffix=".wav", delete=False) + processing_utils.audio_to_file(audio[0], audio[1], tmp_wav.name, format="wav") + audio_file = tmp_wav.name + + if not os.path.isfile(audio_file): + raise ValueError("Audio file not found.") + + ffmpeg = shutil.which("ffmpeg") + if not ffmpeg: + raise RuntimeError("ffmpeg not found.") + + duration = round(len(audio[1]) / audio[0], 4) + + # Helper methods to create waveform + def hex_to_rgb(hex_str): + return [int(hex_str[i : i + 2], 16) for i in range(1, 6, 2)] + + def get_color_gradient(c1, c2, n): + assert n > 1 + c1_rgb = np.array(hex_to_rgb(c1)) / 255 + c2_rgb = np.array(hex_to_rgb(c2)) / 255 + mix_pcts = [x / (n - 1) for x in range(n)] + rgb_colors = [((1 - mix) * c1_rgb + (mix * c2_rgb)) for mix in mix_pcts] + return [ + "#" + "".join(f"{int(round(val * 255)):02x}" for val in item) + for item in rgb_colors + ] + + # Reshape audio to have a fixed number of bars + samples = audio[1] + if len(samples.shape) > 1: + samples = np.mean(samples, 1) + bins_to_pad = bar_count - (len(samples) % bar_count) + samples = np.pad(samples, [(0, bins_to_pad)]) + samples = np.reshape(samples, (bar_count, -1)) + samples = np.abs(samples) + samples = np.max(samples, 1) + + with utils.MatplotlibBackendMananger(): + plt.clf() + # Plot waveform + color = ( + bars_color + if isinstance(bars_color, str) + else get_color_gradient(bars_color[0], bars_color[1], bar_count) + ) + + if animate: + fig = plt.figure(figsize=(5, 1), dpi=200, frameon=False) + fig.subplots_adjust(left=0, bottom=0, right=1, top=1) + plt.axis("off") + plt.margins(x=0) + + bar_alpha = fg_alpha if animate else 1.0 + barcollection = plt.bar( + np.arange(0, bar_count), + samples * 2, + 
bottom=(-1 * samples), + width=bar_width, + color=color, + alpha=bar_alpha, + ) + + tmp_img = tempfile.NamedTemporaryFile(suffix=".png", delete=False) + + savefig_kwargs: dict[str, Any] = {"bbox_inches": "tight"} + if bg_image is not None: + savefig_kwargs["transparent"] = True + if animate: + savefig_kwargs["facecolor"] = "none" + else: + savefig_kwargs["facecolor"] = bg_color + plt.savefig(tmp_img.name, **savefig_kwargs) + + if not animate: + waveform_img = PIL.Image.open(tmp_img.name) + waveform_img = waveform_img.resize((1000, 400)) + + # Composite waveform with background image + if bg_image is not None: + waveform_array = np.array(waveform_img) + waveform_array[:, :, 3] = waveform_array[:, :, 3] * fg_alpha + waveform_img = PIL.Image.fromarray(waveform_array) + + bg_img = PIL.Image.open(bg_image) + waveform_width, waveform_height = waveform_img.size + bg_width, bg_height = bg_img.size + if waveform_width != bg_width: + bg_img = bg_img.resize( + ( + waveform_width, + 2 * int(bg_height * waveform_width / bg_width / 2), + ) + ) + bg_width, bg_height = bg_img.size + composite_height = max(bg_height, waveform_height) + composite = PIL.Image.new( + "RGBA", (waveform_width, composite_height), "#FFFFFF" + ) + composite.paste(bg_img, (0, composite_height - bg_height)) + composite.paste( + waveform_img, (0, composite_height - waveform_height), waveform_img + ) + composite.save(tmp_img.name) + img_width, img_height = composite.size + else: + img_width, img_height = waveform_img.size + waveform_img.save(tmp_img.name) + else: + + def _animate(_): + for idx, b in enumerate(barcollection): + rand_height = np.random.uniform(0.8, 1.2) + b.set_height(samples[idx] * rand_height * 2) + b.set_y((-rand_height * samples)[idx]) + + frames = int(duration * 10) + anim = animation.FuncAnimation( + fig, # type: ignore + _animate, + repeat=False, + blit=False, + frames=frames, + interval=100, + ) + anim.save( + tmp_img.name, + writer="pillow", + fps=10, + codec="png", + 
savefig_kwargs=savefig_kwargs, + ) + + # Convert waveform to video with ffmpeg + output_mp4 = tempfile.NamedTemporaryFile(suffix=".mp4", delete=False) + + if animate and bg_image is not None: + ffmpeg_cmd = [ + ffmpeg, + "-loop", + "1", + "-i", + bg_image, + "-i", + tmp_img.name, + "-i", + audio_file, + "-filter_complex", + "[0:v]scale=w=trunc(iw/2)*2:h=trunc(ih/2)*2[bg];[1:v]format=rgba,colorchannelmixer=aa=1.0[ov];[bg][ov]overlay=(main_w-overlay_w*0.9)/2:main_h-overlay_h*0.9/2[output]", + "-t", + str(duration), + "-map", + "[output]", + "-map", + "2:a", + "-c:v", + "libx264", + "-c:a", + "aac", + "-shortest", + "-y", + output_mp4.name, + ] + elif animate and bg_image is None: + ffmpeg_cmd = [ + ffmpeg, + "-i", + tmp_img.name, + "-i", + audio_file, + "-filter_complex", + "[0:v][1:a]concat=n=1:v=1:a=1[v];[v]scale=1000:400,format=yuv420p[v_scaled]", + "-map", + "[v_scaled]", + "-map", + "1:a", + "-c:v", + "libx264", + "-c:a", + "aac", + "-shortest", + "-y", + output_mp4.name, + ] + else: + ffmpeg_cmd = [ + ffmpeg, + "-loop", + "1", + "-i", + tmp_img.name, + "-i", + audio_file, + "-vf", + f"color=c=#FFFFFF77:s={img_width}x{img_height}[bar];[0][bar]overlay=-w+(w/{duration})*t:H-h:shortest=1", # type: ignore + "-t", + str(duration), + "-y", + output_mp4.name, + ] + + subprocess.check_call(ffmpeg_cmd) + return output_mp4.name + + +@document() +class EventData: + """ + When a subclass of EventData is added as a type hint to an argument of an event listener method, this object will be passed as that argument. + It contains information about the event that triggered the listener, such the target object, and other data related to the specific event that are attributes of the subclass. 
+ + Example: + table = gr.Dataframe([[1, 2, 3], [4, 5, 6]]) + gallery = gr.Gallery([("cat.jpg", "Cat"), ("dog.jpg", "Dog")]) + textbox = gr.Textbox("Hello World!") + + statement = gr.Textbox() + + def on_select(evt: gr.SelectData): # SelectData is a subclass of EventData + return f"You selected {evt.value} at {evt.index} from {evt.target}" + + table.select(on_select, None, statement) + gallery.select(on_select, None, statement) + textbox.select(on_select, None, statement) + Demos: gallery_selections, tictactoe + """ + + def __init__(self, target: Block | None, _data: Any): + """ + Parameters: + target: The target object that triggered the event. Can be used to distinguish if multiple components are bound to the same listener. + """ + self.target = target + self._data = _data + + +def log_message(message: str, level: Literal["info", "warning"] = "info"): + from gradio.context import LocalContext + + blocks = LocalContext.blocks.get() + if blocks is None: # Function called outside of Gradio + if level == "info": + print(message) + elif level == "warning": + warnings.warn(message) + return + if not blocks.enable_queue: + warnings.warn( + f"Queueing must be enabled to issue {level.capitalize()}: '{message}'." + ) + return + event_id = LocalContext.event_id.get() + assert event_id + blocks._queue.log_message(event_id=event_id, log=message, level=level) + + +set_documentation_group("modals") + + +@document() +def Warning(message: str = "Warning issued."): # noqa: N802 + """ + This function allows you to pass custom warning messages to the user. You can do so simply with `gr.Warning('message here')`, and when that line is executed the custom message will appear in a modal on the demo. The modal is yellow by default and has the heading: "Warning." Queue must be enabled to use Warning. + Demos: blocks_chained_events + Parameters: + message: The warning message to be displayed to the user. 
+ Example: + import gradio as gr + def hello_world(): + gr.Warning('This is a warning message.') + return "hello world" + with gr.Blocks() as demo: + md = gr.Markdown() + demo.load(hello_world, inputs=None, outputs=[md]) + demo.queue().launch() + """ + log_message(message, level="warning") + + +@document() +def Info(message: str = "Info issued."): # noqa: N802 + """ + This function allows you to pass custom info messages to the user. You can do so simply by writing `gr.Info('message here')` in your function, and when that line is executed the custom message will appear in a (gray, by default) modal on the demo. Queue must be enabled to use Info. + Demos: blocks_chained_events + Parameters: + message: The info message to be displayed to the user. + Example: + import gradio as gr + def hello_world(): + gr.Info('This is some info.') + return "hello world" + with gr.Blocks() as demo: + md = gr.Markdown() + demo.load(hello_world, inputs=None, outputs=[md]) + demo.queue().launch() + """ + log_message(message, level="info") diff --git a/testbed/gradio-app__gradio/gradio/inputs.py b/testbed/gradio-app__gradio/gradio/inputs.py new file mode 100644 index 0000000000000000000000000000000000000000..9345530649a0b8843c27d7a0f965ac73bfcce7d6 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/inputs.py @@ -0,0 +1,451 @@ +# type: ignore +""" +This module defines various classes that can serve as the `input` to an interface. Each class must inherit from +`InputComponent`, and each class must define a path to its template. All of the subclasses of `InputComponent` are +automatically added to a registry, which allows them to be easily referenced in other parts of the code. 
+""" + +from __future__ import annotations + +from typing import Any, Optional + +from gradio import components +from gradio.deprecation import warn_deprecation + + +def warn_inputs_deprecation(): + warn_deprecation( + "Usage of gradio.inputs is deprecated, and will not be supported in the future, please import your component from gradio.components", + ) + + +class Textbox(components.Textbox): + def __init__( + self, + lines: int = 1, + placeholder: Optional[str] = None, + default: str = "", + numeric: Optional[bool] = False, + type: Optional[str] = "text", + label: Optional[str] = None, + optional: bool = False, + ): + warn_inputs_deprecation() + super().__init__( + value=default, + lines=lines, + placeholder=placeholder, + label=label, + numeric=numeric, + type=type, + optional=optional, + ) + + +class Number(components.Number): + """ + Component creates a field for user to enter numeric input. Provides a number as an argument to the wrapped function. + Input type: float + """ + + def __init__( + self, + default: Optional[float] = None, + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + default (float): default value. + label (str): component name in interface. + optional (bool): If True, the interface can be submitted with no value for this component. + """ + warn_inputs_deprecation() + super().__init__(value=default, label=label, optional=optional) + + +class Slider(components.Slider): + """ + Component creates a slider that ranges from `minimum` to `maximum`. Provides number as an argument to the wrapped function. + Input type: float + """ + + def __init__( + self, + minimum: float = 0, + maximum: float = 100, + step: Optional[float] = None, + default: Optional[float] = None, + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + minimum (float): minimum value for slider. + maximum (float): maximum value for slider. + step (float): increment between slider values. + default (float): default value. 
+ label (str): component name in interface. + optional (bool): this parameter is ignored. + """ + warn_inputs_deprecation() + + super().__init__( + value=default, + minimum=minimum, + maximum=maximum, + step=step, + label=label, + optional=optional, + ) + + +class Checkbox(components.Checkbox): + """ + Component creates a checkbox that can be set to `True` or `False`. Provides a boolean as an argument to the wrapped function. + Input type: bool + """ + + def __init__( + self, + default: bool = False, + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + label (str): component name in interface. + default (bool): if True, checked by default. + optional (bool): this parameter is ignored. + """ + warn_inputs_deprecation() + super().__init__(value=default, label=label, optional=optional) + + +class CheckboxGroup(components.CheckboxGroup): + """ + Component creates a set of checkboxes of which a subset can be selected. Provides a list of strings representing the selected choices as an argument to the wrapped function. + Input type: Union[List[str], List[int]] + """ + + def __init__( + self, + choices: list[str], + default: list[str] | None = None, + type: str = "value", + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + choices (List[str]): list of options to select from. + default (List[str]): default selected list of options. + type (str): Type of value to be returned by component. "value" returns the list of strings of the choices selected, "index" returns the list of indices of the choices selected. + label (str): component name in interface. + optional (bool): this parameter is ignored. + """ + if default is None: + default = [] + warn_inputs_deprecation() + super().__init__( + value=default, + choices=choices, + type=type, + label=label, + optional=optional, + ) + + +class Radio(components.Radio): + """ + Component creates a set of radio buttons of which only one can be selected. 
Provides string representing selected choice as an argument to the wrapped function. + Input type: Union[str, int] + """ + + def __init__( + self, + choices: list[str], + type: str = "value", + default: Optional[str] = None, + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + choices (List[str]): list of options to select from. + type (str): Type of value to be returned by component. "value" returns the string of the choice selected, "index" returns the index of the choice selected. + default (str): the button selected by default. If None, no button is selected by default. + label (str): component name in interface. + optional (bool): this parameter is ignored. + """ + warn_inputs_deprecation() + super().__init__( + choices=choices, + type=type, + value=default, + label=label, + optional=optional, + ) + + +class Dropdown(components.Dropdown): + """ + Component creates a dropdown of which only one can be selected. Provides string representing selected choice as an argument to the wrapped function. + Input type: Union[str, int] + """ + + def __init__( + self, + choices: list[str], + type: str = "value", + default: Optional[str] = None, + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + choices (List[str]): list of options to select from. + type (str): Type of value to be returned by component. "value" returns the string of the choice selected, "index" returns the index of the choice selected. + default (str): default value selected in dropdown. If None, no value is selected by default. + label (str): component name in interface. + optional (bool): this parameter is ignored. + """ + warn_inputs_deprecation() + super().__init__( + choices=choices, + type=type, + value=default, + label=label, + optional=optional, + ) + + +class Image(components.Image): + """ + Component creates an image upload box with editing capabilities. 
+ Input type: Union[numpy.array, PIL.Image, file-object] + """ + + def __init__( + self, + shape: tuple[int, int] = None, + image_mode: str = "RGB", + invert_colors: bool = False, + source: str = "upload", + tool: str = "editor", + type: str = "numpy", + label: str = None, + optional: bool = False, + ): + """ + Parameters: + shape (Tuple[int, int]): (width, height) shape to crop and resize image to; if None, matches input image size. + image_mode (str): How to process the uploaded image. Accepts any of the PIL image modes, e.g. "RGB" for color images, "RGBA" to include the transparency mask, "L" for black-and-white images. + invert_colors (bool): whether to invert the image as a preprocessing step. + source (str): Source of image. "upload" creates a box where user can drop an image file, "webcam" allows user to take snapshot from their webcam, "canvas" defaults to a white image that can be edited and drawn upon with tools. + tool (str): Tools used for editing. "editor" allows a full screen editor, "select" provides a cropping and zoom tool. + type (str): Type of value to be returned by component. "numpy" returns a numpy array with shape (height, width, 3) and values from 0 to 255, "pil" returns a PIL image object, "file" returns a temporary file object whose path can be retrieved by file_obj.name, "filepath" returns the path directly. + label (str): component name in interface. + optional (bool): If True, the interface can be submitted with no uploaded image, in which case the input value is None. + """ + warn_inputs_deprecation() + super().__init__( + shape=shape, + image_mode=image_mode, + invert_colors=invert_colors, + source=source, + tool=tool, + type=type, + label=label, + optional=optional, + ) + + +class Video(components.Video): + """ + Component creates a video file upload that is converted to a file path. 
+ + Input type: filepath + """ + + def __init__( + self, + type: Optional[str] = None, + source: str = "upload", + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + type (str): Type of video format to be returned by component, such as 'avi' or 'mp4'. If set to None, video will keep uploaded format. + source (str): Source of video. "upload" creates a box where user can drop an video file, "webcam" allows user to record a video from their webcam. + label (str): component name in interface. + optional (bool): If True, the interface can be submitted with no uploaded video, in which case the input value is None. + """ + warn_inputs_deprecation() + super().__init__(format=type, source=source, label=label, optional=optional) + + +class Audio(components.Audio): + """ + Component accepts audio input files. + Input type: Union[Tuple[int, numpy.array], file-object, numpy.array] + """ + + def __init__( + self, + source: str = "upload", + type: str = "numpy", + label: str = None, + optional: bool = False, + ): + """ + Parameters: + source (str): Source of audio. "upload" creates a box where user can drop an audio file, "microphone" creates a microphone input. + type (str): Type of value to be returned by component. "numpy" returns a 2-set tuple with an integer sample_rate and the data numpy.array of shape (samples, 2), "file" returns a temporary file object whose path can be retrieved by file_obj.name, "filepath" returns the path directly. + label (str): component name in interface. + optional (bool): If True, the interface can be submitted with no uploaded audio, in which case the input value is None. + """ + warn_inputs_deprecation() + super().__init__(source=source, type=type, label=label, optional=optional) + + +class File(components.File): + """ + Component accepts generic file uploads. 
+ Input type: Union[file-object, bytes, List[Union[file-object, bytes]]] + """ + + def __init__( + self, + file_count: str = "single", + type: str = "file", + label: Optional[str] = None, + keep_filename: bool = True, + optional: bool = False, + ): + """ + Parameters: + file_count (str): if single, allows user to upload one file. If "multiple", user uploads multiple files. If "directory", user uploads all files in selected directory. Return type will be list for each file in case of "multiple" or "directory". + type (str): Type of value to be returned by component. "file" returns a temporary file object whose path can be retrieved by file_obj.name, "binary" returns an bytes object. + label (str): component name in interface. + keep_filename (bool): DEPRECATED. Original filename always kept. + optional (bool): If True, the interface can be submitted with no uploaded image, in which case the input value is None. + """ + warn_inputs_deprecation() + super().__init__( + file_count=file_count, + type=type, + label=label, + keep_filename=keep_filename, + optional=optional, + ) + + +class Dataframe(components.Dataframe): + """ + Component accepts 2D input through a spreadsheet interface. + Input type: Union[pandas.DataFrame, numpy.array, List[Union[str, float]], List[List[Union[str, float]]]] + """ + + def __init__( + self, + headers: Optional[list[str]] = None, + row_count: int = 3, + col_count: Optional[int] = 3, + datatype: str | list[str] = "str", + col_width: int | list[int] = None, + default: Optional[list[list[Any]]] = None, + type: str = "pandas", + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + headers (List[str]): Header names to dataframe. If None, no headers are shown. + row_count (int): Limit number of rows for input. + col_count (int): Limit number of columns for input. If equal to 1, return data will be one-dimensional. Ignored if `headers` is provided. + datatype (Union[str, List[str]]): Datatype of values in sheet. 
Can be provided per column as a list of strings, or for the entire sheet as a single string. Valid datatypes are "str", "number", "bool", and "date". + col_width (Union[int, List[int]]): Width of columns in pixels. Can be provided as single value or list of values per column. + default (List[List[Any]]): Default value + type (str): Type of value to be returned by component. "pandas" for pandas dataframe, "numpy" for numpy array, or "array" for a Python array. + label (str): component name in interface. + optional (bool): this parameter is ignored. + """ + warn_inputs_deprecation() + super().__init__( + value=default, + headers=headers, + row_count=row_count, + col_count=col_count, + datatype=datatype, + col_width=col_width, + type=type, + label=label, + optional=optional, + ) + + +class Timeseries(components.Timeseries): + """ + Component accepts pandas.DataFrame uploaded as a timeseries csv file. + Input type: pandas.DataFrame + """ + + def __init__( + self, + x: Optional[str] = None, + y: str | list[str] = None, + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + x (str): Column name of x (time) series. None if csv has no headers, in which case first column is x series. + y (Union[str, List[str]]): Column name of y series, or list of column names if multiple series. None if csv has no headers, in which case every column after first is a y series. + label (str): component name in interface. + optional (bool): If True, the interface can be submitted with no uploaded csv file, in which case the input value is None. + """ + warn_inputs_deprecation() + super().__init__(x=x, y=y, label=label, optional=optional) + + +class State(components.State): + """ + Special hidden component that stores state across runs of the interface. + Input type: Any + """ + + def __init__( + self, + label: str = None, + default: Any = None, + ): + """ + Parameters: + label (str): component name in interface (not used). 
+ default (Any): the initial value of the state. + optional (bool): this parameter is ignored. + """ + warn_inputs_deprecation() + super().__init__(value=default, label=label) + + +class Image3D(components.Model3D): + """ + Used for 3D image model output. + Input type: File object of type (.obj, glb, or .gltf) + """ + + def __init__( + self, + label: Optional[str] = None, + optional: bool = False, + ): + """ + Parameters: + label (str): component name in interface. + optional (bool): If True, the interface can be submitted with no uploaded image, in which case the input value is None. + """ + warn_inputs_deprecation() + super().__init__(label=label, optional=optional) diff --git a/testbed/gradio-app__gradio/gradio/interface.py b/testbed/gradio-app__gradio/gradio/interface.py new file mode 100644 index 0000000000000000000000000000000000000000..d2db6b78c684a645ed700dc64018afcd86bbad6a --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/interface.py @@ -0,0 +1,913 @@ +""" +This file defines two useful high-level abstractions to build Gradio apps: Interface and TabbedInterface. 
+""" + +from __future__ import annotations + +import inspect +import json +import os +import warnings +import weakref +from typing import TYPE_CHECKING, Any, Callable, Literal + +from gradio_client.documentation import document, set_documentation_group + +from gradio import Examples, external, interpretation, utils +from gradio.blocks import Blocks +from gradio.components import ( + Button, + ClearButton, + DuplicateButton, + Interpretation, + IOComponent, + Markdown, + State, + get_component_instance, +) +from gradio.data_classes import InterfaceTypes +from gradio.deprecation import warn_deprecation +from gradio.events import Changeable, Streamable, Submittable, on +from gradio.flagging import CSVLogger, FlaggingCallback, FlagMethod +from gradio.layouts import Column, Row, Tab, Tabs +from gradio.pipelines import load_from_pipeline +from gradio.themes import ThemeClass as Theme + +set_documentation_group("interface") + +if TYPE_CHECKING: # Only import for type checking (is False at runtime). + from transformers.pipelines.base import Pipeline + + from gradio.events import EventListenerMethod + + +@document("launch", "load", "from_pipeline", "integrate", "queue") +class Interface(Blocks): + """ + Interface is Gradio's main high-level class, and allows you to create a web-based GUI / demo + around a machine learning model (or any Python function) in a few lines of code. + You must specify three parameters: (1) the function to create a GUI for (2) the desired input components and + (3) the desired output components. Additional parameters can be used to control the appearance + and behavior of the demo. 
+ + Example: + import gradio as gr + + def image_classifier(inp): + return {'cat': 0.3, 'dog': 0.7} + + demo = gr.Interface(fn=image_classifier, inputs="image", outputs="label") + demo.launch() + Demos: hello_world, hello_world_3, gpt2_xl + Guides: quickstart, key-features, sharing-your-app, interface-state, reactive-interfaces, advanced-interface-features, setting-up-a-gradio-demo-for-maximum-performance + """ + + # stores references to all currently existing Interface instances + instances: weakref.WeakSet = weakref.WeakSet() + + @classmethod + def get_instances(cls) -> list[Interface]: + """ + :return: list of all current instances. + """ + return list(Interface.instances) + + @classmethod + def load( + cls, + name: str, + src: str | None = None, + api_key: str | None = None, + alias: str | None = None, + **kwargs, + ) -> Blocks: + """ + Warning: this method will be deprecated. Use the equivalent `gradio.load()` instead. This is a class + method that constructs a Blocks from a Hugging Face repo. Can accept + model repos (if src is "models") or Space repos (if src is "spaces"). The input + and output components are automatically loaded from the repo. + Parameters: + name: the name of the model (e.g. "gpt2" or "facebook/bart-base") or space (e.g. "flax-community/spanish-gpt2"), can include the `src` as prefix (e.g. "models/facebook/bart-base") + src: the source of the model: `models` or `spaces` (or leave empty if source is provided as a prefix in `name`) + api_key: optional access token for loading private Hugging Face Hub models or spaces. Find your token here: https://huggingface.co/settings/tokens. Warning: only provide this if you are loading a trusted private Space as it can be read by the Space you are loading. 
+ alias: optional string used as the name of the loaded model instead of the default name (only applies if loading a Space running Gradio 2.x) + Returns: + a Gradio Interface object for the given model + """ + warn_deprecation( + "gr.Interface.load() will be deprecated. Use gr.load() instead." + ) + return external.load( + name=name, src=src, hf_token=api_key, alias=alias, **kwargs + ) + + @classmethod + def from_pipeline(cls, pipeline: Pipeline, **kwargs) -> Interface: + """ + Class method that constructs an Interface from a Hugging Face transformers.Pipeline object. + The input and output components are automatically determined from the pipeline. + Parameters: + pipeline: the pipeline object to use. + Returns: + a Gradio Interface object from the given Pipeline + Example: + import gradio as gr + from transformers import pipeline + pipe = pipeline("image-classification") + gr.Interface.from_pipeline(pipe).launch() + """ + interface_info = load_from_pipeline(pipeline) + kwargs = dict(interface_info, **kwargs) + interface = cls(**kwargs) + return interface + + def __init__( + self, + fn: Callable, + inputs: str | IOComponent | list[str | IOComponent] | None, + outputs: str | IOComponent | list[str | IOComponent] | None, + examples: list[Any] | list[list[Any]] | str | None = None, + cache_examples: bool | None = None, + examples_per_page: int = 10, + live: bool = False, + interpretation: Callable | str | None = None, + num_shap: float = 2.0, + title: str | None = None, + description: str | None = None, + article: str | None = None, + thumbnail: str | None = None, + theme: Theme | str | None = None, + css: str | None = None, + allow_flagging: str | None = None, + flagging_options: list[str] | list[tuple[str, str]] | None = None, + flagging_dir: str = "flagged", + flagging_callback: FlaggingCallback = CSVLogger(), + analytics_enabled: bool | None = None, + batch: bool = False, + max_batch_size: int = 4, + api_name: str | Literal[False] | None = "predict", + _api_mode: 
bool = False, + allow_duplication: bool = False, + **kwargs, + ): + """ + Parameters: + fn: the function to wrap an interface around. Often a machine learning model's prediction function. Each parameter of the function corresponds to one input component, and the function should return a single value or a tuple of values, with each element in the tuple corresponding to one output component. + inputs: a single Gradio component, or list of Gradio components. Components can either be passed as instantiated objects, or referred to by their string shortcuts. The number of input components should match the number of parameters in fn. If set to None, then only the output components will be displayed. + outputs: a single Gradio component, or list of Gradio components. Components can either be passed as instantiated objects, or referred to by their string shortcuts. The number of output components should match the number of values returned by fn. If set to None, then only the input components will be displayed. + examples: sample inputs for the function; if provided, appear below the UI components and can be clicked to populate the interface. Should be nested list, in which the outer list consists of samples and each inner list consists of an input corresponding to each input component. A string path to a directory of examples can also be provided, but it should be within the directory with the python file running the gradio app. If there are multiple input components and a directory is provided, a log.csv file must be present in the directory to link corresponding inputs. + cache_examples: If True, caches examples in the server for fast runtime in examples. If `fn` is a generator function, then the last yielded value will be used as the output. The default option in HuggingFace Spaces is True. The default option elsewhere is False. + examples_per_page: If examples are provided, how many to display per page. 
+ live: whether the interface should automatically rerun if any of the inputs change. + interpretation: function that provides interpretation explaining prediction output. Pass "default" to use simple built-in interpreter, "shap" to use a built-in shapley-based interpreter, or your own custom interpretation function. For more information on the different interpretation methods, see the Advanced Interface Features guide. + num_shap: a multiplier that determines how many examples are computed for shap-based interpretation. Increasing this value will increase shap runtime, but improve results. Only applies if interpretation is "shap". + title: a title for the interface; if provided, appears above the input and output components in large font. Also used as the tab title when opened in a browser window. + description: a description for the interface; if provided, appears above the input and output components and beneath the title in regular font. Accepts Markdown and HTML content. + article: an expanded article explaining the interface; if provided, appears below the input and output components in regular font. Accepts Markdown and HTML content. + thumbnail: path or url to image to use as display image when the web demo is shared on social media. + theme: Theme to use, loaded from gradio.themes. + css: custom css or path to custom css file to use with interface. + allow_flagging: one of "never", "auto", or "manual". If "never" or "auto", users will not see a button to flag an input and output. If "manual", users will see a button to flag. If "auto", every input the user submits will be automatically flagged (outputs are not flagged). If "manual", both the input and outputs are flagged when the user clicks flag button. This parameter can be set with environmental variable GRADIO_ALLOW_FLAGGING; otherwise defaults to "manual". + flagging_options: if provided, allows user to select from the list of options when flagging. Only applies if allow_flagging is "manual". 
Can either be a list of tuples of the form (label, value), where label is the string that will be displayed on the button and value is the string that will be stored in the flagging CSV; or it can be a list of strings ["X", "Y"], in which case the values will be the list of strings and the labels will ["Flag as X", "Flag as Y"], etc. + flagging_dir: what to name the directory where flagged data is stored. + flagging_callback: An instance of a subclass of FlaggingCallback which will be called when a sample is flagged. By default logs to a local CSV file. + analytics_enabled: Whether to allow basic telemetry. If None, will use GRADIO_ANALYTICS_ENABLED environment variable if defined, or default to True. + batch: If True, then the function should process a batch of inputs, meaning that it should accept a list of input values for each parameter. The lists should be of equal length (and be up to length `max_batch_size`). The function is then *required* to return a tuple of lists (even if there is only 1 output component), with each list in the tuple corresponding to one output component. + max_batch_size: Maximum number of inputs to batch together if this is called from the queue (only relevant if batch=True) + api_name: Defines how the endpoint appears in the API docs. Can be a string, None, or False. If False or None, the endpoint will not be exposed in the api docs. If set to a string, the endpoint will be exposed in the api docs with the given name. Default value is "predict". + allow_duplication: If True, then will show a 'Duplicate Spaces' button on Hugging Face Spaces. + """ + super().__init__( + analytics_enabled=analytics_enabled, + mode="interface", + css=css, + title=title or "Gradio", + theme=theme, + **kwargs, + ) + self.api_name: str | Literal[False] | None = api_name + + if isinstance(fn, list): + raise DeprecationWarning( + "The `fn` parameter only accepts a single function, support for a list " + "of functions has been deprecated. 
Please use gradio.mix.Parallel " + "instead." + ) + + self.interface_type = InterfaceTypes.STANDARD + if (inputs is None or inputs == []) and (outputs is None or outputs == []): + raise ValueError("Must provide at least one of `inputs` or `outputs`") + elif outputs is None or outputs == []: + outputs = [] + self.interface_type = InterfaceTypes.INPUT_ONLY + elif inputs is None or inputs == []: + inputs = [] + self.interface_type = InterfaceTypes.OUTPUT_ONLY + + assert isinstance(inputs, (str, list, IOComponent)) + assert isinstance(outputs, (str, list, IOComponent)) + + if not isinstance(inputs, list): + inputs = [inputs] + if not isinstance(outputs, list): + outputs = [outputs] + + if self.space_id and cache_examples is None: + self.cache_examples = True + else: + self.cache_examples = cache_examples or False + + state_input_indexes = [ + idx for idx, i in enumerate(inputs) if i == "state" or isinstance(i, State) + ] + state_output_indexes = [ + idx for idx, o in enumerate(outputs) if o == "state" or isinstance(o, State) + ] + + if len(state_input_indexes) == 0 and len(state_output_indexes) == 0: + pass + elif len(state_input_indexes) != 1 or len(state_output_indexes) != 1: + raise ValueError( + "If using 'state', there must be exactly one state input and one state output." + ) + else: + state_input_index = state_input_indexes[0] + state_output_index = state_output_indexes[0] + if inputs[state_input_index] == "state": + default = utils.get_default_args(fn)[state_input_index] + state_variable = State(value=default) # type: ignore + else: + state_variable = inputs[state_input_index] + + inputs[state_input_index] = state_variable + outputs[state_output_index] = state_variable + + if cache_examples: + warnings.warn( + "Cache examples cannot be used with state inputs and outputs." + "Setting cache_examples to False." 
+ ) + self.cache_examples = False + + self.input_components = [ + get_component_instance(i, unrender=True) for i in inputs # type: ignore + ] + self.output_components = [ + get_component_instance(o, unrender=True) for o in outputs # type: ignore + ] + + for component in self.input_components + self.output_components: + if not (isinstance(component, IOComponent)): + raise ValueError( + f"{component} is not a valid input/output component for Interface." + ) + + if len(self.input_components) == len(self.output_components): + same_components = [ + i is o for i, o in zip(self.input_components, self.output_components) + ] + if all(same_components): + self.interface_type = InterfaceTypes.UNIFIED + + if self.interface_type in [ + InterfaceTypes.STANDARD, + InterfaceTypes.OUTPUT_ONLY, + ]: + for o in self.output_components: + assert isinstance(o, IOComponent) + if o.interactive is None: + # Unless explicitly otherwise specified, force output components to + # be non-interactive + o.interactive = False + if ( + interpretation is None + or isinstance(interpretation, list) + or callable(interpretation) + ): + self.interpretation = interpretation + elif isinstance(interpretation, str): + self.interpretation = [ + interpretation.lower() for _ in self.input_components + ] + else: + raise ValueError("Invalid value for parameter: interpretation") + + self.api_mode = _api_mode + self.fn = fn + self.fn_durations = [0, 0] + self.__name__ = getattr(fn, "__name__", "fn") + self.live = live + self.title = title + + self.simple_description = utils.remove_html_tags(description) + self.description = description + if article is not None: + article = utils.readme_to_html(article) + self.article = article + + self.thumbnail = thumbnail + + self.examples = examples + self.num_shap = num_shap + self.examples_per_page = examples_per_page + + self.simple_server = None + + # For allow_flagging: (1) first check for parameter, + # (2) check for env variable, (3) default to True/"manual" + if 
allow_flagging is None: + allow_flagging = os.getenv("GRADIO_ALLOW_FLAGGING", "manual") + if allow_flagging is True: + warnings.warn( + "The `allow_flagging` parameter in `Interface` now" + "takes a string value ('auto', 'manual', or 'never')" + ", not a boolean. Setting parameter to: 'manual'." + ) + self.allow_flagging = "manual" + elif allow_flagging == "manual": + self.allow_flagging = "manual" + elif allow_flagging is False: + warnings.warn( + "The `allow_flagging` parameter in `Interface` now" + "takes a string value ('auto', 'manual', or 'never')" + ", not a boolean. Setting parameter to: 'never'." + ) + self.allow_flagging = "never" + elif allow_flagging == "never": + self.allow_flagging = "never" + elif allow_flagging == "auto": + self.allow_flagging = "auto" + else: + raise ValueError( + "Invalid value for `allow_flagging` parameter." + "Must be: 'auto', 'manual', or 'never'." + ) + + if flagging_options is None: + self.flagging_options = [("Flag", "")] + elif not (isinstance(flagging_options, list)): + raise ValueError( + "flagging_options must be a list of strings or list of (string, string) tuples." + ) + elif all(isinstance(x, str) for x in flagging_options): + self.flagging_options = [(f"Flag as {x}", x) for x in flagging_options] + elif all(isinstance(x, tuple) for x in flagging_options): + self.flagging_options = flagging_options + else: + raise ValueError( + "flagging_options must be a list of strings or list of (string, string) tuples." 
+ ) + + self.flagging_callback = flagging_callback + self.flagging_dir = flagging_dir + self.batch = batch + self.max_batch_size = max_batch_size + self.allow_duplication = allow_duplication + + self.share = None + self.share_url = None + self.local_url = None + + self.favicon_path = None + Interface.instances.add(self) + + param_types = utils.get_type_hints(self.fn) + param_names = inspect.getfullargspec(self.fn)[0] + if len(param_names) > 0 and inspect.ismethod(self.fn): + param_names = param_names[1:] + for param_name in param_names.copy(): + if utils.is_special_typed_parameter(param_name, param_types): + param_names.remove(param_name) + for component, param_name in zip(self.input_components, param_names): + assert isinstance(component, IOComponent) + if component.label is None: + component.label = param_name + for i, component in enumerate(self.output_components): + assert isinstance(component, IOComponent) + if component.label is None: + if len(self.output_components) == 1: + component.label = "output" + else: + component.label = f"output {i}" + + if self.allow_flagging != "never": + if ( + self.interface_type == InterfaceTypes.UNIFIED + or self.allow_flagging == "auto" + ): + self.flagging_callback.setup(self.input_components, self.flagging_dir) # type: ignore + elif self.interface_type == InterfaceTypes.INPUT_ONLY: + pass + else: + self.flagging_callback.setup( + self.input_components + self.output_components, self.flagging_dir # type: ignore + ) + + # Render the Gradio UI + with self: + self.render_title_description() + + submit_btn, clear_btn, stop_btn, flag_btns, duplicate_btn = ( + None, + None, + None, + None, + None, + ) + interpretation_btn, interpretation_set = None, None + input_component_column, interpret_component_column = None, None + + with Row(equal_height=False): + if self.interface_type in [ + InterfaceTypes.STANDARD, + InterfaceTypes.INPUT_ONLY, + InterfaceTypes.UNIFIED, + ]: + ( + submit_btn, + clear_btn, + stop_btn, + flag_btns, + 
input_component_column, + interpret_component_column, + interpretation_set, + ) = self.render_input_column() + if self.interface_type in [ + InterfaceTypes.STANDARD, + InterfaceTypes.OUTPUT_ONLY, + ]: + ( + submit_btn_out, + clear_btn_2_out, + duplicate_btn, + stop_btn_2_out, + flag_btns_out, + interpretation_btn, + ) = self.render_output_column(submit_btn) + submit_btn = submit_btn or submit_btn_out + clear_btn = clear_btn or clear_btn_2_out + stop_btn = stop_btn or stop_btn_2_out + flag_btns = flag_btns or flag_btns_out + + assert clear_btn is not None, "Clear button not rendered" + + self.attach_submit_events(submit_btn, stop_btn) + self.attach_clear_events( + clear_btn, input_component_column, interpret_component_column + ) + if duplicate_btn is not None: + duplicate_btn.activate() + self.attach_interpretation_events( + interpretation_btn, + interpretation_set, + input_component_column, + interpret_component_column, + ) + + self.attach_flagging_events(flag_btns, clear_btn) + self.render_examples() + self.render_article() + + self.config = self.get_config_file() + + def render_title_description(self) -> None: + if self.title: + Markdown( + f"

{self.title}

" + ) + if self.description: + Markdown(self.description) + + def render_flag_btns(self) -> list[Button]: + return [Button(label) for label, _ in self.flagging_options] + + def render_input_column( + self, + ) -> tuple[ + Button | None, + ClearButton | None, + Button | None, + list[Button] | None, + Column, + Column | None, + list[Interpretation] | None, + ]: + submit_btn, clear_btn, stop_btn, flag_btns = None, None, None, None + interpret_component_column, interpretation_set = None, None + + with Column(variant="panel"): + input_component_column = Column() + with input_component_column: + for component in self.input_components: + component.render() + if self.interpretation: + interpret_component_column = Column(visible=False) + interpretation_set = [] + with interpret_component_column: + for component in self.input_components: + interpretation_set.append(Interpretation(component)) + with Row(): + if self.interface_type in [ + InterfaceTypes.STANDARD, + InterfaceTypes.INPUT_ONLY, + ]: + clear_btn = ClearButton() + if not self.live: + submit_btn = Button("Submit", variant="primary") + # Stopping jobs only works if the queue is enabled + # We don't know if the queue is enabled when the interface + # is created. We use whether a generator function is provided + # as a proxy of whether the queue will be enabled. + # Using a generator function without the queue will raise an error. 
+ if inspect.isgeneratorfunction( + self.fn + ) or inspect.isasyncgenfunction(self.fn): + stop_btn = Button("Stop", variant="stop", visible=False) + elif self.interface_type == InterfaceTypes.UNIFIED: + clear_btn = ClearButton() + submit_btn = Button("Submit", variant="primary") + if ( + inspect.isgeneratorfunction(self.fn) + or inspect.isasyncgenfunction(self.fn) + ) and not self.live: + stop_btn = Button("Stop", variant="stop") + if self.allow_flagging == "manual": + flag_btns = self.render_flag_btns() + elif self.allow_flagging == "auto": + flag_btns = [submit_btn] + return ( + submit_btn, + clear_btn, + stop_btn, + flag_btns, + input_component_column, + interpret_component_column, + interpretation_set, + ) + + def render_output_column( + self, + submit_btn_in: Button | None, + ) -> tuple[ + Button | None, + ClearButton | None, + DuplicateButton, + Button | None, + list | None, + Button | None, + ]: + submit_btn = submit_btn_in + interpretation_btn, clear_btn, duplicate_btn, flag_btns, stop_btn = ( + None, + None, + None, + None, + None, + ) + + with Column(variant="panel"): + for component in self.output_components: + if not (isinstance(component, State)): + component.render() + with Row(): + if self.interface_type == InterfaceTypes.OUTPUT_ONLY: + clear_btn = ClearButton() + submit_btn = Button("Generate", variant="primary") + if ( + inspect.isgeneratorfunction(self.fn) + or inspect.isasyncgenfunction(self.fn) + ) and not self.live: + # Stopping jobs only works if the queue is enabled + # We don't know if the queue is enabled when the interface + # is created. We use whether a generator function is provided + # as a proxy of whether the queue will be enabled. + # Using a generator function without the queue will raise an error. 
+ stop_btn = Button("Stop", variant="stop", visible=False) + if self.allow_flagging == "manual": + flag_btns = self.render_flag_btns() + elif self.allow_flagging == "auto": + assert submit_btn is not None, "Submit button not rendered" + flag_btns = [submit_btn] + + if self.interpretation: + interpretation_btn = Button("Interpret") + + if self.allow_duplication: + duplicate_btn = DuplicateButton(scale=1, size="lg", _activate=False) + + return ( + submit_btn, + clear_btn, + duplicate_btn, + stop_btn, + flag_btns, + interpretation_btn, + ) + + def render_article(self): + if self.article: + Markdown(self.article) + + def attach_submit_events(self, submit_btn: Button | None, stop_btn: Button | None): + if self.live: + if self.interface_type == InterfaceTypes.OUTPUT_ONLY: + assert submit_btn is not None, "Submit button not rendered" + super().load(self.fn, None, self.output_components) + # For output-only interfaces, the user probably still want a "generate" + # button even if the Interface is live + submit_btn.click( + self.fn, + None, + self.output_components, + api_name=self.api_name, + preprocess=not (self.api_mode), + postprocess=not (self.api_mode), + batch=self.batch, + max_batch_size=self.max_batch_size, + ) + else: + events: list[EventListenerMethod] = [] + for component in self.input_components: + if isinstance(component, Streamable) and component.streaming: + events.append(component.stream) + elif isinstance(component, Changeable): + events.append(component.change) + on( + events, + self.fn, + self.input_components, + self.output_components, + api_name=self.api_name, + preprocess=not (self.api_mode), + postprocess=not (self.api_mode), + ) + else: + assert submit_btn is not None, "Submit button not rendered" + fn = self.fn + extra_output = [] + + triggers = [submit_btn.click] + [ + component.submit + for component in self.input_components + if isinstance(component, Submittable) + ] + + if stop_btn: + extra_output = [submit_btn, stop_btn] + + def cleanup(): + 
return [Button.update(visible=True), Button.update(visible=False)] + + predict_event = on( + triggers, + lambda: ( + submit_btn.update(visible=False), + stop_btn.update(visible=True), + ), + inputs=None, + outputs=[submit_btn, stop_btn], + queue=False, + ).then( + self.fn, + self.input_components, + self.output_components, + api_name=self.api_name, + scroll_to_output=True, + preprocess=not (self.api_mode), + postprocess=not (self.api_mode), + batch=self.batch, + max_batch_size=self.max_batch_size, + ) + + predict_event.then( + cleanup, + inputs=None, + outputs=extra_output, # type: ignore + queue=False, + ) + + stop_btn.click( + cleanup, + inputs=None, + outputs=[submit_btn, stop_btn], + cancels=predict_event, + queue=False, + ) + else: + on( + triggers, + fn, + self.input_components, + self.output_components, + api_name=self.api_name, + scroll_to_output=True, + preprocess=not (self.api_mode), + postprocess=not (self.api_mode), + batch=self.batch, + max_batch_size=self.max_batch_size, + ) + + def attach_clear_events( + self, + clear_btn: ClearButton, + input_component_column: Column | None, + interpret_component_column: Column | None, + ): + clear_btn.add(self.input_components + self.output_components) + clear_btn.click( + None, + [], + ( + ([input_component_column] if input_component_column else []) + + ([interpret_component_column] if self.interpretation else []) + ), # type: ignore + _js=f"""() => {json.dumps( + ( + [Column.update(visible=True)] + if self.interface_type + in [ + InterfaceTypes.STANDARD, + InterfaceTypes.INPUT_ONLY, + InterfaceTypes.UNIFIED, + ] + else [] + ) + + ([Column.update(visible=False)] if self.interpretation else []) + )} + """, + ) + + def attach_interpretation_events( + self, + interpretation_btn: Button | None, + interpretation_set: list[Interpretation] | None, + input_component_column: Column | None, + interpret_component_column: Column | None, + ): + if interpretation_btn: + interpretation_btn.click( + self.interpret_func, + 
inputs=self.input_components + self.output_components, + outputs=(interpretation_set or []) + [input_component_column, interpret_component_column], # type: ignore + preprocess=False, + ) + + def attach_flagging_events( + self, flag_btns: list[Button] | None, clear_btn: ClearButton + ): + if not ( + flag_btns + and self.interface_type + in ( + InterfaceTypes.STANDARD, + InterfaceTypes.OUTPUT_ONLY, + InterfaceTypes.UNIFIED, + ) + ): + return + + if self.allow_flagging == "auto": + flag_method = FlagMethod( + self.flagging_callback, "", "", visual_feedback=False + ) + flag_btns[0].click( # flag_btns[0] is just the "Submit" button + flag_method, + inputs=self.input_components, + outputs=None, + preprocess=False, + queue=False, + ) + return + + if self.interface_type == InterfaceTypes.UNIFIED: + flag_components = self.input_components + else: + flag_components = self.input_components + self.output_components + + for flag_btn, (label, value) in zip(flag_btns, self.flagging_options): + assert isinstance(value, str) + flag_method = FlagMethod(self.flagging_callback, label, value) + flag_btn.click( + lambda: Button.update(value="Saving...", interactive=False), + None, + flag_btn, + queue=False, + ) + flag_btn.click( + flag_method, + inputs=flag_components, + outputs=flag_btn, + preprocess=False, + queue=False, + ) + clear_btn.click( + flag_method.reset, + None, + flag_btn, + queue=False, + ) + + def render_examples(self): + if self.examples: + non_state_inputs = [ + c for c in self.input_components if not isinstance(c, State) + ] + non_state_outputs = [ + c for c in self.output_components if not isinstance(c, State) + ] + self.examples_handler = Examples( + examples=self.examples, + inputs=non_state_inputs, # type: ignore + outputs=non_state_outputs, # type: ignore + fn=self.fn, + cache_examples=self.cache_examples, + examples_per_page=self.examples_per_page, + _api_mode=self.api_mode, + batch=self.batch, + ) + + def __str__(self): + return self.__repr__() + + def 
__repr__(self): + repr = f"Gradio Interface for: {self.__name__}" + repr += f"\n{'-' * len(repr)}" + repr += "\ninputs:" + for component in self.input_components: + repr += f"\n|-{component}" + repr += "\noutputs:" + for component in self.output_components: + repr += f"\n|-{component}" + return repr + + async def interpret_func(self, *args): + return await self.interpret(list(args)) + [ + Column.update(visible=False), + Column.update(visible=True), + ] + + async def interpret(self, raw_input: list[Any]) -> list[Any]: + return [ + {"original": raw_value, "interpretation": interpretation} + for interpretation, raw_value in zip( + (await interpretation.run_interpret(self, raw_input))[0], raw_input + ) + ] + + def test_launch(self) -> None: + """ + Deprecated. + """ + warn_deprecation("The Interface.test_launch() function is deprecated.") + + +@document() +class TabbedInterface(Blocks): + """ + A TabbedInterface is created by providing a list of Interfaces, each of which gets + rendered in a separate tab. + Demos: stt_or_tts + """ + + def __init__( + self, + interface_list: list[Interface], + tab_names: list[str] | None = None, + title: str | None = None, + theme: Theme | None = None, + analytics_enabled: bool | None = None, + css: str | None = None, + ): + """ + Parameters: + interface_list: a list of interfaces to be rendered in tabs. + tab_names: a list of tab names. If None, the tab names will be "Tab 1", "Tab 2", etc. + title: a title for the interface; if provided, appears above the input and output components in large font. Also used as the tab title when opened in a browser window. + analytics_enabled: whether to allow basic telemetry. If None, will use GRADIO_ANALYTICS_ENABLED environment variable or default to True. 
+ css: custom css or path to custom css file to apply to entire Blocks + Returns: + a Gradio Tabbed Interface for the given interfaces + """ + super().__init__( + title=title or "Gradio", + theme=theme, + analytics_enabled=analytics_enabled, + mode="tabbed_interface", + css=css, + ) + if tab_names is None: + tab_names = [f"Tab {i}" for i in range(len(interface_list))] + with self: + if title: + Markdown( + f"

{title}

" + ) + with Tabs(): + for interface, tab_name in zip(interface_list, tab_names): + with Tab(label=tab_name): + interface.render() + + +def close_all(verbose: bool = True) -> None: + for io in Interface.get_instances(): + io.close(verbose) diff --git a/testbed/gradio-app__gradio/gradio/interpretation.py b/testbed/gradio-app__gradio/gradio/interpretation.py new file mode 100644 index 0000000000000000000000000000000000000000..767ad641b99a51c08b4efadec350c7170bdc734b --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/interpretation.py @@ -0,0 +1,328 @@ +"""Contains classes and methods related to interpretation for components in Gradio.""" + +from __future__ import annotations + +import copy +import math +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any + +import numpy as np +from gradio_client import utils as client_utils + +from gradio import components + +if TYPE_CHECKING: # Only import for type checking (is False at runtime). + from gradio import Interface + + +class Interpretable(ABC): # noqa: B024 + def __init__(self) -> None: + self.set_interpret_parameters() + + def set_interpret_parameters(self): # noqa: B027 + """ + Set any parameters for interpretation. Properties can be set here to be + used in get_interpretation_neighbors and get_interpretation_scores. + """ + pass + + def get_interpretation_scores( + self, x: Any, neighbors: list[Any] | None, scores: list[float], **kwargs + ) -> list: + """ + Arrange the output values from the neighbors into interpretation scores for the interface to render. + Parameters: + x: Input to interface + neighbors: Neighboring values to input x used for interpretation. + scores: Output value corresponding to each neighbor in neighbors + Returns: + Arrangement of interpretation scores for interfaces to render. 
+ """ + return scores + + +class TokenInterpretable(Interpretable, ABC): + @abstractmethod + def tokenize(self, x: Any) -> tuple[list, list, None]: + """ + Interprets an input data point x by splitting it into a list of tokens (e.g + a string into words or an image into super-pixels). + """ + return [], [], None + + @abstractmethod + def get_masked_inputs(self, tokens: list, binary_mask_matrix: list[list]) -> list: + return [] + + +class NeighborInterpretable(Interpretable, ABC): + @abstractmethod + def get_interpretation_neighbors(self, x: Any) -> tuple[list, dict]: + """ + Generates values similar to input to be used to interpret the significance of the input in the final output. + Parameters: + x: Input to interface + Returns: (neighbor_values, interpret_kwargs, interpret_by_removal) + neighbor_values: Neighboring values to input x to compute for interpretation + interpret_kwargs: Keyword arguments to be passed to get_interpretation_scores + """ + return [], {} + + +async def run_interpret(interface: Interface, raw_input: list): + """ + Runs the interpretation command for the machine learning model. Handles both the "default" out-of-the-box + interpretation for a certain set of UI component types, as well as the custom interpretation case. + Parameters: + raw_input: a list of raw inputs to apply the interpretation(s) on. 
+ """ + if isinstance(interface.interpretation, list): # Either "default" or "shap" + processed_input = [ + input_component.preprocess(raw_input[i]) + for i, input_component in enumerate(interface.input_components) + ] + original_output = await interface.call_function(0, processed_input) + original_output = original_output["prediction"] + + if len(interface.output_components) == 1: + original_output = [original_output] + + scores, alternative_outputs = [], [] + + for i, (x, interp) in enumerate(zip(raw_input, interface.interpretation)): + if interp == "default": + input_component = interface.input_components[i] + neighbor_raw_input = list(raw_input) + if isinstance(input_component, TokenInterpretable): + tokens, neighbor_values, masks = input_component.tokenize(x) + interface_scores = [] + alternative_output = [] + for neighbor_input in neighbor_values: + neighbor_raw_input[i] = neighbor_input + processed_neighbor_input = [ + input_component.preprocess(neighbor_raw_input[i]) + for i, input_component in enumerate( + interface.input_components + ) + ] + + neighbor_output = await interface.call_function( + 0, processed_neighbor_input + ) + neighbor_output = neighbor_output["prediction"] + if len(interface.output_components) == 1: + neighbor_output = [neighbor_output] + processed_neighbor_output = [ + output_component.postprocess(neighbor_output[i]) + for i, output_component in enumerate( + interface.output_components + ) + ] + + alternative_output.append(processed_neighbor_output) + interface_scores.append( + quantify_difference_in_label( + interface, original_output, neighbor_output + ) + ) + alternative_outputs.append(alternative_output) + scores.append( + input_component.get_interpretation_scores( + raw_input[i], + neighbor_values, + interface_scores, + masks=masks, + tokens=tokens, + ) + ) + elif isinstance(input_component, NeighborInterpretable): + ( + neighbor_values, + interpret_kwargs, + ) = input_component.get_interpretation_neighbors( + x + ) # type: ignore 
+ interface_scores = [] + alternative_output = [] + for neighbor_input in neighbor_values: + neighbor_raw_input[i] = neighbor_input + processed_neighbor_input = [ + input_component.preprocess(neighbor_raw_input[i]) + for i, input_component in enumerate( + interface.input_components + ) + ] + neighbor_output = await interface.call_function( + 0, processed_neighbor_input + ) + neighbor_output = neighbor_output["prediction"] + if len(interface.output_components) == 1: + neighbor_output = [neighbor_output] + processed_neighbor_output = [ + output_component.postprocess(neighbor_output[i]) + for i, output_component in enumerate( + interface.output_components + ) + ] + + alternative_output.append(processed_neighbor_output) + interface_scores.append( + quantify_difference_in_label( + interface, original_output, neighbor_output + ) + ) + alternative_outputs.append(alternative_output) + interface_scores = [-score for score in interface_scores] + scores.append( + input_component.get_interpretation_scores( + raw_input[i], + neighbor_values, + interface_scores, + **interpret_kwargs, + ) + ) + else: + raise ValueError( + f"Component {input_component} does not support interpretation" + ) + elif interp == "shap" or interp == "shapley": + try: + import shap # type: ignore + except (ImportError, ModuleNotFoundError) as err: + raise ValueError( + "The package `shap` is required for this interpretation method. 
Try: `pip install shap`" + ) from err + input_component = interface.input_components[i] + if not isinstance(input_component, TokenInterpretable): + raise ValueError( + f"Input component {input_component} does not support `shap` interpretation" + ) + + tokens, _, masks = input_component.tokenize(x) + + # construct a masked version of the input + def get_masked_prediction(binary_mask): + assert isinstance(input_component, TokenInterpretable) + masked_xs = input_component.get_masked_inputs(tokens, binary_mask) + preds = [] + for masked_x in masked_xs: + processed_masked_input = copy.deepcopy(processed_input) + processed_masked_input[i] = input_component.preprocess(masked_x) + new_output = client_utils.synchronize_async( + interface.call_function, 0, processed_masked_input + ) + new_output = new_output["prediction"] + if len(interface.output_components) == 1: + new_output = [new_output] + pred = get_regression_or_classification_value( + interface, original_output, new_output + ) + preds.append(pred) + return np.array(preds) + + num_total_segments = len(tokens) + explainer = shap.KernelExplainer( + get_masked_prediction, np.zeros((1, num_total_segments)) + ) + shap_values = explainer.shap_values( + np.ones((1, num_total_segments)), + nsamples=int(interface.num_shap * num_total_segments), + silent=True, + ) + assert shap_values is not None, "SHAP values could not be calculated" + scores.append( + input_component.get_interpretation_scores( + raw_input[i], + None, + shap_values[0].tolist(), + masks=masks, + tokens=tokens, + ) + ) + alternative_outputs.append([]) + elif interp is None: + scores.append(None) + alternative_outputs.append([]) + else: + raise ValueError(f"Unknown interpretation method: {interp}") + return scores, alternative_outputs + elif interface.interpretation: # custom interpretation function + processed_input = [ + input_component.preprocess(raw_input[i]) + for i, input_component in enumerate(interface.input_components) + ] + interpreter = 
interface.interpretation + interpretation = interpreter(*processed_input) + if len(raw_input) == 1: + interpretation = [interpretation] + return interpretation, [] + else: + raise ValueError("No interpretation method specified.") + + +def diff(original: Any, perturbed: Any) -> int | float: + try: # try computing numerical difference + score = float(original) - float(perturbed) + except ValueError: # otherwise, look at strict difference in label + score = int(original != perturbed) + return score + + +def quantify_difference_in_label( + interface: Interface, original_output: list, perturbed_output: list +) -> int | float: + output_component = interface.output_components[0] + post_original_output = output_component.postprocess(original_output[0]) + post_perturbed_output = output_component.postprocess(perturbed_output[0]) + + if isinstance(output_component, components.Label): + original_label = post_original_output["label"] + perturbed_label = post_perturbed_output["label"] + + # Handle different return types of Label interface + if "confidences" in post_original_output: + original_confidence = original_output[0][original_label] + perturbed_confidence = perturbed_output[0][original_label] + score = original_confidence - perturbed_confidence + else: + score = diff(original_label, perturbed_label) + return score + + elif isinstance(output_component, components.Number): + score = diff(post_original_output, post_perturbed_output) + return score + + else: + raise ValueError( + f"This interpretation method doesn't support the Output component: {output_component}" + ) + + +def get_regression_or_classification_value( + interface: Interface, original_output: list, perturbed_output: list +) -> int | float: + """Used to combine regression/classification for Shap interpretation method.""" + output_component = interface.output_components[0] + post_original_output = output_component.postprocess(original_output[0]) + post_perturbed_output = 
output_component.postprocess(perturbed_output[0]) + + if isinstance(output_component, components.Label): + original_label = post_original_output["label"] + perturbed_label = post_perturbed_output["label"] + + # Handle different return types of Label interface + if "confidences" in post_original_output: + if math.isnan(perturbed_output[0][original_label]): + return 0 + return perturbed_output[0][original_label] + else: + score = diff( + perturbed_label, original_label + ) # Intentionally inverted order of arguments. + return score + + else: + raise ValueError( + f"This interpretation method doesn't support the Output component: {output_component}" + ) diff --git a/testbed/gradio-app__gradio/gradio/ipython_ext.py b/testbed/gradio-app__gradio/gradio/ipython_ext.py new file mode 100644 index 0000000000000000000000000000000000000000..b6bb8063930f2ff60fba39459ce0b7829987f959 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/ipython_ext.py @@ -0,0 +1,89 @@ +try: + from IPython.core.magic import ( + needs_local_scope, + register_cell_magic, + ) + from IPython.core.magic_arguments import argument, magic_arguments, parse_argstring +except ImportError: + pass + +import gradio as gr +from gradio.networking import App +from gradio.utils import BaseReloader + + +class CellIdTracker: + """Determines the most recently run cell in the notebook. + + Needed to keep track of which demo the user is updating. 
+ """ + + def __init__(self, ipython): + ipython.events.register("pre_run_cell", self.pre_run_cell) + self.shell = ipython + self.current_cell: str = "" + + def pre_run_cell(self, info): + self._current_cell = info.cell_id + + +class JupyterReloader(BaseReloader): + """Swap a running blocks class in a notebook with the latest cell contents.""" + + def __init__(self, ipython) -> None: + super().__init__() + self._cell_tracker = CellIdTracker(ipython) + self._running: dict[str, gr.Blocks] = {} + + @property + def current_cell(self): + return self._cell_tracker.current_cell + + @property + def running_app(self) -> App: + assert self.running_demo.server + return self.running_demo.server.running_app # type: ignore + + @property + def running_demo(self): + return self._running[self.current_cell] + + def demo_tracked(self) -> bool: + return self.current_cell in self._running + + def track(self, demo: gr.Blocks): + self._running[self.current_cell] = demo + + +def load_ipython_extension(ipython): + reloader = JupyterReloader(ipython) + + @magic_arguments() + @argument("--demo-name", default="demo", help="Name of gradio blocks instance.") + @argument( + "--share", + default=False, + const=True, + nargs="?", + help="Whether to launch with sharing. Will slow down reloading.", + ) + @register_cell_magic + @needs_local_scope + def blocks(line, cell, local_ns): + """Launch a demo defined in a cell in reload mode.""" + + args = parse_argstring(blocks, line) + + exec(cell, None, local_ns) + demo: gr.Blocks = local_ns[args.demo_name] + if not reloader.demo_tracked(): + demo.launch(share=args.share) + reloader.track(demo) + elif reloader.queue_changed(demo): + print("Queue got added or removed. 
Restarting demo.") + reloader.running_demo.close() + demo.launch() + reloader.track(demo) + else: + reloader.swap_blocks(demo) + return reloader.running_demo.artifact diff --git a/testbed/gradio-app__gradio/gradio/layouts.py b/testbed/gradio-app__gradio/gradio/layouts.py new file mode 100644 index 0000000000000000000000000000000000000000..c80fe5d5deb30528cd7d845268bdc7f24c5fb43c --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/layouts.py @@ -0,0 +1,373 @@ +from __future__ import annotations + +import warnings +from typing import TYPE_CHECKING, Literal + +from gradio_client.documentation import document, set_documentation_group + +from gradio.blocks import BlockContext, Updateable +from gradio.deprecation import warn_deprecation, warn_style_method_deprecation +from gradio.events import Changeable, Selectable + +if TYPE_CHECKING: + from gradio.blocks import Block + +set_documentation_group("layout") + + +@document() +class Row(Updateable, BlockContext): + """ + Row is a layout element within Blocks that renders all children horizontally. + Example: + with gr.Blocks() as demo: + with gr.Row(): + gr.Image("lion.jpg", scale=2) + gr.Image("tiger.jpg", scale=1) + demo.launch() + Guides: controlling-layout + """ + + def __init__( + self, + *, + variant: Literal["default", "panel", "compact"] = "default", + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + equal_height: bool = True, + **kwargs, + ): + """ + Parameters: + variant: row type, 'default' (no background), 'panel' (gray background color and rounded corners), or 'compact' (rounded corners and no internal gap). + visible: If False, row will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional string or list of strings that are assigned as the class of this component in the HTML DOM. Can be used for targeting CSS styles. 
+ equal_height: If True, makes every child element have equal height + """ + self.variant = variant + self.equal_height = equal_height + if variant == "compact": + self.allow_expected_parents = False + BlockContext.__init__( + self, visible=visible, elem_id=elem_id, elem_classes=elem_classes, **kwargs + ) + + @staticmethod + def update( + visible: bool | None = None, + ): + return { + "visible": visible, + "__type__": "update", + } + + def style( + self, + *, + equal_height: bool | None = None, + **kwargs, + ): + """ + Styles the Row. + Parameters: + equal_height: If True, makes every child element have equal height + """ + warn_style_method_deprecation() + if equal_height is not None: + self.equal_height = equal_height + return self + + +@document() +class Column(Updateable, BlockContext): + """ + Column is a layout element within Blocks that renders all children vertically. The widths of columns can be set through the `scale` and `min_width` parameters. + If a certain scale results in a column narrower than min_width, the min_width parameter will win. + Example: + with gr.Blocks() as demo: + with gr.Row(): + with gr.Column(scale=1): + text1 = gr.Textbox() + text2 = gr.Textbox() + with gr.Column(scale=4): + btn1 = gr.Button("Button 1") + btn2 = gr.Button("Button 2") + Guides: controlling-layout + """ + + def __init__( + self, + *, + scale: int = 1, + min_width: int = 320, + variant: Literal["default", "panel", "compact"] = "default", + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + scale: relative width compared to adjacent Columns. For example, if Column A has scale=2, and Column B has scale=1, A will be twice as wide as B. + min_width: minimum pixel width of Column, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in a column narrower than min_width, the min_width parameter will be respected first. 
+ variant: column type, 'default' (no background), 'panel' (gray background color and rounded corners), or 'compact' (rounded corners and no internal gap). + visible: If False, column will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional string or list of strings that are assigned as the class of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + if scale != round(scale): + warn_deprecation( + f"'scale' value should be an integer. Using {scale} will cause issues." + ) + + self.scale = scale + self.min_width = min_width + self.variant = variant + if variant == "compact": + self.allow_expected_parents = False + BlockContext.__init__( + self, visible=visible, elem_id=elem_id, elem_classes=elem_classes, **kwargs + ) + + @staticmethod + def update( + variant: str | None = None, + visible: bool | None = None, + ): + return { + "variant": variant, + "visible": visible, + "__type__": "update", + } + + +class Tabs(BlockContext, Changeable, Selectable): + """ + Tabs is a layout element within Blocks that can contain multiple "Tab" Components. + """ + + def __init__( + self, + *, + selected: int | str | None = None, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + selected: The currently selected tab. Must correspond to an id passed to the one of the child TabItems. Defaults to the first TabItem. + visible: If False, Tabs will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional string or list of strings that are assigned as the class of this component in the HTML DOM. Can be used for targeting CSS styles. 
+ """ + BlockContext.__init__( + self, visible=visible, elem_id=elem_id, elem_classes=elem_classes, **kwargs + ) + Changeable.__init__(self) + Selectable.__init__(self) + self.selected = selected + + @staticmethod + def update( + selected: int | str | None = None, + ): + return { + "selected": selected, + "__type__": "update", + } + + +@document() +class Tab(BlockContext, Selectable): + """ + Tab (or its alias TabItem) is a layout element. Components defined within the Tab will be visible when this tab is selected tab. + Example: + with gr.Blocks() as demo: + with gr.Tab("Lion"): + gr.Image("lion.jpg") + gr.Button("New Lion") + with gr.Tab("Tiger"): + gr.Image("tiger.jpg") + gr.Button("New Tiger") + Guides: controlling-layout + """ + + def __init__( + self, + label: str, + *, + id: int | str | None = None, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + label: The visual label for the tab + id: An optional identifier for the tab, required if you wish to control the selected tab from a predict function. + elem_id: An optional string that is assigned as the id of the
containing the contents of the Tab layout. The same string followed by "-button" is attached to the Tab button. Can be used for targeting CSS styles. + elem_classes: An optional string or list of strings that are assigned as the class of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + BlockContext.__init__( + self, elem_id=elem_id, elem_classes=elem_classes, **kwargs + ) + Selectable.__init__(self) + self.label = label + self.id = id + + def get_expected_parent(self) -> type[Tabs]: + return Tabs + + def get_block_name(self): + return "tabitem" + + +TabItem = Tab + + +@document() +class Group(Updateable, BlockContext): + """ + Group is a layout element within Blocks which groups together children so that + they do not have any padding or margin between them. + Example: + with gr.Group(): + gr.Textbox(label="First") + gr.Textbox(label="Last") + """ + + def __init__( + self, + *, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + visible: If False, group will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional string or list of strings that are assigned as the class of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + BlockContext.__init__( + self, visible=visible, elem_id=elem_id, elem_classes=elem_classes, **kwargs + ) + + @staticmethod + def update( + visible: bool | None = None, + ): + return { + "visible": visible, + "__type__": "update", + } + + +class Box(Updateable, BlockContext): + """ + DEPRECATED. + Box is a a layout element which places children in a box with rounded corners and + some padding around them. 
+ Example: + with gr.Box(): + gr.Textbox(label="First") + gr.Textbox(label="Last") + """ + + def __init__( + self, + *, + visible: bool = True, + elem_id: str | None = None, + **kwargs, + ): + """ + Parameters: + visible: If False, box will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + warnings.warn("gr.Box is deprecated. Use gr.Group instead.", DeprecationWarning) + BlockContext.__init__(self, visible=visible, elem_id=elem_id, **kwargs) + + @staticmethod + def update( + visible: bool | None = None, + ): + return { + "visible": visible, + "__type__": "update", + } + + def style(self, **kwargs): + warn_style_method_deprecation() + return self + + +class Form(Updateable, BlockContext): + def __init__(self, *, scale: int = 0, min_width: int = 0, **kwargs): + """ + Parameters: + scale: relative width compared to adjacent Columns. For example, if Column A has scale=2, and Column B has scale=1, A will be twice as wide as B. + min_width: minimum pixel width of Column, will wrap if not sufficient screen space to satisfy this value. If a certain scale value results in a column narrower than min_width, the min_width parameter will be respected first. + """ + self.scale = scale + self.min_width = min_width + BlockContext.__init__(self, **kwargs) + + def add_child(self, child: Block): + if isinstance(self.parent, Row): + scale = getattr(child, "scale", None) + self.scale += 1 if scale is None else scale + self.min_width += getattr(child, "min_width", 0) or 0 + BlockContext.add_child(self, child) + + +@document() +class Accordion(Updateable, BlockContext): + """ + Accordion is a layout element which can be toggled to show/hide the contained content. 
+ Example: + with gr.Accordion("See Details"): + gr.Markdown("lorem ipsum") + """ + + def __init__( + self, + label, + *, + open: bool = True, + visible: bool = True, + elem_id: str | None = None, + elem_classes: list[str] | str | None = None, + **kwargs, + ): + """ + Parameters: + label: name of accordion section. + open: if True, accordion is open by default. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. + elem_classes: An optional string or list of strings that are assigned as the class of this component in the HTML DOM. Can be used for targeting CSS styles. + """ + self.label = label + self.open = open + BlockContext.__init__( + self, visible=visible, elem_id=elem_id, elem_classes=elem_classes, **kwargs + ) + + @staticmethod + def update( + open: bool | None = None, + label: str | None = None, + visible: bool | None = None, + ): + return { + "visible": visible, + "label": label, + "open": open, + "__type__": "update", + } diff --git a/testbed/gradio-app__gradio/gradio/mix.py b/testbed/gradio-app__gradio/gradio/mix.py new file mode 100644 index 0000000000000000000000000000000000000000..caf2c68b835101c4f3d18d3d53fbb1b8494b3dba --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/mix.py @@ -0,0 +1,129 @@ +""" +Ways to transform interfaces to produce new interfaces +""" +import asyncio +import warnings + +from gradio_client.documentation import document, set_documentation_group + +import gradio + +set_documentation_group("mix_interface") + + +@document() +class Parallel(gradio.Interface): + """ + Creates a new Interface consisting of multiple Interfaces in parallel (comparing their outputs). + The Interfaces to put in Parallel must share the same input components (but can have different output components). 
+ + Demos: interface_parallel, interface_parallel_load + Guides: advanced-interface-features + """ + + def __init__(self, *interfaces: gradio.Interface, **options): + """ + Parameters: + interfaces: any number of Interface objects that are to be compared in parallel + options: additional kwargs that are passed into the new Interface object to customize it + Returns: + an Interface object comparing the given models + """ + outputs = [] + + for interface in interfaces: + if not (isinstance(interface, gradio.Interface)): + warnings.warn( + "Parallel requires all inputs to be of type Interface. " + "May not work as expected." + ) + outputs.extend(interface.output_components) + + async def parallel_fn(*args): + return_values_with_durations = await asyncio.gather( + *[interface.call_function(0, list(args)) for interface in interfaces] + ) + return_values = [rv["prediction"] for rv in return_values_with_durations] + combined_list = [] + for interface, return_value in zip(interfaces, return_values): + if len(interface.output_components) == 1: + combined_list.append(return_value) + else: + combined_list.extend(return_value) + if len(outputs) == 1: + return combined_list[0] + return combined_list + + parallel_fn.__name__ = " | ".join([io.__name__ for io in interfaces]) + + kwargs = { + "fn": parallel_fn, + "inputs": interfaces[0].input_components, + "outputs": outputs, + } + kwargs.update(options) + super().__init__(**kwargs) + + +@document() +class Series(gradio.Interface): + """ + Creates a new Interface from multiple Interfaces in series (the output of one is fed as the input to the next, + and so the input and output components must agree between the interfaces). 
+ + Demos: interface_series, interface_series_load + Guides: advanced-interface-features + """ + + def __init__(self, *interfaces: gradio.Interface, **options): + """ + Parameters: + interfaces: any number of Interface objects that are to be connected in series + options: additional kwargs that are passed into the new Interface object to customize it + Returns: + an Interface object connecting the given models + """ + + async def connected_fn(*data): + for idx, interface in enumerate(interfaces): + # skip preprocessing for first interface since the Series interface will include it + if idx > 0 and not (interface.api_mode): + data = [ + input_component.preprocess(data[i]) + for i, input_component in enumerate(interface.input_components) + ] + + # run all of predictions sequentially + data = (await interface.call_function(0, list(data)))["prediction"] + if len(interface.output_components) == 1: + data = [data] + + # skip postprocessing for final interface since the Series interface will include it + if idx < len(interfaces) - 1 and not (interface.api_mode): + data = [ + output_component.postprocess(data[i]) + for i, output_component in enumerate( + interface.output_components + ) + ] + + if len(interface.output_components) == 1: # type: ignore + return data[0] + return data + + for interface in interfaces: + if not (isinstance(interface, gradio.Interface)): + warnings.warn( + "Series requires all inputs to be of type Interface. May " + "not work as expected." 
+ ) + connected_fn.__name__ = " => ".join([io.__name__ for io in interfaces]) + + kwargs = { + "fn": connected_fn, + "inputs": interfaces[0].input_components, + "outputs": interfaces[-1].output_components, + "_api_mode": interfaces[0].api_mode, # TODO: set api_mode per-interface + } + kwargs.update(options) + super().__init__(**kwargs) diff --git a/testbed/gradio-app__gradio/gradio/networking.py b/testbed/gradio-app__gradio/gradio/networking.py new file mode 100644 index 0000000000000000000000000000000000000000..b216f0191e24bfef315d7a6a399df4e78317d683 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/networking.py @@ -0,0 +1,246 @@ +""" +Defines helper methods useful for setting up ports, launching servers, and +creating tunnels. +""" +from __future__ import annotations + +import os +import socket +import threading +import time +import warnings +from functools import partial +from typing import TYPE_CHECKING + +import requests +import uvicorn +from uvicorn.config import Config + +from gradio.exceptions import ServerFailedToStartError +from gradio.routes import App +from gradio.tunneling import Tunnel +from gradio.utils import SourceFileReloader, watchfn + +if TYPE_CHECKING: # Only import for type checking (to avoid circular imports). + from gradio.blocks import Blocks + +# By default, the local server will try to open on localhost, port 7860. +# If that is not available, then it will try 7861, 7862, ... 7959. 
+INITIAL_PORT_VALUE = int(os.getenv("GRADIO_SERVER_PORT", "7860")) +TRY_NUM_PORTS = int(os.getenv("GRADIO_NUM_PORTS", "100")) +LOCALHOST_NAME = os.getenv("GRADIO_SERVER_NAME", "127.0.0.1") +GRADIO_API_SERVER = "https://api.gradio.app/v2/tunnel-request" + +should_watch = bool(os.getenv("GRADIO_WATCH_DIRS", False)) +GRADIO_WATCH_DIRS = ( + os.getenv("GRADIO_WATCH_DIRS", "").split(",") if should_watch else [] +) +GRADIO_WATCH_FILE = os.getenv("GRADIO_WATCH_FILE", "app") +GRADIO_WATCH_DEMO_NAME = os.getenv("GRADIO_WATCH_DEMO_NAME", "demo") + + +class Server(uvicorn.Server): + def __init__( + self, config: Config, reloader: SourceFileReloader | None = None + ) -> None: + self.running_app = config.app + super().__init__(config) + self.reloader = reloader + if self.reloader: + self.event = threading.Event() + self.watch = partial(watchfn, self.reloader) + + def install_signal_handlers(self): + pass + + def run_in_thread(self): + self.thread = threading.Thread(target=self.run, daemon=True) + if self.reloader: + self.watch_thread = threading.Thread(target=self.watch, daemon=True) + self.watch_thread.start() + self.thread.start() + start = time.time() + while not self.started: + time.sleep(1e-3) + if time.time() - start > 5: + raise ServerFailedToStartError( + "Server failed to start. Please check that the port is available." 
+ ) + + def close(self): + self.should_exit = True + if self.reloader: + self.reloader.stop() + self.watch_thread.join() + self.thread.join() + + +def get_first_available_port(initial: int, final: int) -> int: + """ + Gets the first open port in a specified range of port numbers + Parameters: + initial: the initial value in the range of port numbers + final: final (exclusive) value in the range of port numbers, should be greater than `initial` + Returns: + port: the first open port in the range + """ + for port in range(initial, final): + try: + s = socket.socket() # create a socket object + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.bind((LOCALHOST_NAME, port)) # Bind to the port + s.close() + return port + except OSError: + pass + raise OSError( + f"All ports from {initial} to {final - 1} are in use. Please close a port." + ) + + +def configure_app(app: App, blocks: Blocks) -> App: + auth = blocks.auth + if auth is not None: + if not callable(auth): + app.auth = {account[0]: account[1] for account in auth} + else: + app.auth = auth + else: + app.auth = None + app.blocks = blocks + app.cwd = os.getcwd() + app.favicon_path = blocks.favicon_path + app.tokens = {} + return app + + +def start_server( + blocks: Blocks, + server_name: str | None = None, + server_port: int | None = None, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_keyfile_password: str | None = None, + app_kwargs: dict | None = None, +) -> tuple[str, int, str, App, Server]: + """Launches a local server running the provided Interface + Parameters: + blocks: The Blocks object to run on the server + server_name: to make app accessible on local network, set this to "0.0.0.0". Can be set by environment variable GRADIO_SERVER_NAME. + server_port: will start gradio app on this port (if available). Can be set by environment variable GRADIO_SERVER_PORT. + auth: If provided, username and password (or list of username-password tuples) required to access the Blocks. 
Can also provide function that takes username and password and returns True if valid login. + ssl_keyfile: If a path to a file is provided, will use this as the private key file to create a local server running on https. + ssl_certfile: If a path to a file is provided, will use this as the signed certificate for https. Needs to be provided if ssl_keyfile is provided. + ssl_keyfile_password: If a password is provided, will use this with the ssl certificate for https. + app_kwargs: Additional keyword arguments to pass to the gradio.routes.App constructor. + + Returns: + port: the port number the server is running on + path_to_local_server: the complete address that the local server can be accessed at + app: the FastAPI app object + server: the server object that is a subclass of uvicorn.Server (used to close the server) + """ + if ssl_keyfile is not None and ssl_certfile is None: + raise ValueError("ssl_certfile must be provided if ssl_keyfile is provided.") + + server_name = server_name or LOCALHOST_NAME + url_host_name = "localhost" if server_name == "0.0.0.0" else server_name + + # Strip IPv6 brackets from the address if they exist. + # This is needed as http://[::1]:port/ is a valid browser address, + # but not a valid IPv6 address, so asyncio will throw an exception. + if server_name.startswith("[") and server_name.endswith("]"): + host = server_name[1:-1] + else: + host = server_name + + app = App.create_app(blocks, app_kwargs=app_kwargs) + + server_ports = ( + [server_port] + if server_port is not None + else range(INITIAL_PORT_VALUE, INITIAL_PORT_VALUE + TRY_NUM_PORTS) + ) + + for port in server_ports: + try: + # The fastest way to check if a port is available is to try to bind to it with socket. + # If the port is not available, socket will throw an OSError. 
+ s = socket.socket() + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + # Really, we should be checking if (server_name, server_port) is available, but + # socket.bind() doesn't seem to throw an OSError with ipv6 addresses, based on my testing. + # Instead, we just check if the port is available on localhost. + s.bind((LOCALHOST_NAME, port)) + s.close() + + # To avoid race conditions, so we also check if the port by trying to start the uvicorn server. + # If the port is not available, this will throw a ServerFailedToStartError. + config = uvicorn.Config( + app=app, + port=port, + host=host, + log_level="warning", + ssl_keyfile=ssl_keyfile, + ssl_certfile=ssl_certfile, + ssl_keyfile_password=ssl_keyfile_password, + ws_max_size=1024 * 1024 * 1024, # Setting max websocket size to be 1 GB + ) + reloader = None + if GRADIO_WATCH_DIRS: + change_event = threading.Event() + app.change_event = change_event + reloader = SourceFileReloader( + app=app, + watch_dirs=GRADIO_WATCH_DIRS, + watch_file=GRADIO_WATCH_FILE, + demo_name=GRADIO_WATCH_DEMO_NAME, + stop_event=threading.Event(), + change_event=change_event, + ) + server = Server(config=config, reloader=reloader) + server.run_in_thread() + break + except (OSError, ServerFailedToStartError): + pass + else: + raise OSError( + f"Cannot find empty port in range: {min(server_ports)}-{max(server_ports)}. You can specify a different port by setting the GRADIO_SERVER_PORT environment variable or passing the `server_port` parameter to `launch()`." 
+ ) + + if ssl_keyfile is not None: + path_to_local_server = f"https://{url_host_name}:{port}/" + else: + path_to_local_server = f"http://{url_host_name}:{port}/" + + return server_name, port, path_to_local_server, app, server + + +def setup_tunnel(local_host: str, local_port: int, share_token: str) -> str: + response = requests.get(GRADIO_API_SERVER) + if response and response.status_code == 200: + try: + payload = response.json()[0] + remote_host, remote_port = payload["host"], int(payload["port"]) + tunnel = Tunnel( + remote_host, remote_port, local_host, local_port, share_token + ) + address = tunnel.start_tunnel() + return address + except Exception as e: + raise RuntimeError(str(e)) from e + raise RuntimeError("Could not get share link from Gradio API Server.") + + +def url_ok(url: str) -> bool: + try: + for _ in range(5): + with warnings.catch_warnings(): + warnings.filterwarnings("ignore") + r = requests.head(url, timeout=3, verify=False) + if r.status_code in (200, 401, 302): # 401 or 302 if auth is set + return True + time.sleep(0.500) + except (ConnectionError, requests.exceptions.ConnectionError): + return False + return False diff --git a/testbed/gradio-app__gradio/gradio/oauth.py b/testbed/gradio-app__gradio/gradio/oauth.py new file mode 100644 index 0000000000000000000000000000000000000000..ccb63a79b7584ce82fa08bb4df0bf9332d0729ec --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/oauth.py @@ -0,0 +1,186 @@ +from __future__ import annotations + +import hashlib +import os +import typing +import warnings + +import fastapi +from fastapi.responses import RedirectResponse + +from .utils import get_space + +OAUTH_CLIENT_ID = os.environ.get("OAUTH_CLIENT_ID") +OAUTH_CLIENT_SECRET = os.environ.get("OAUTH_CLIENT_SECRET") +OAUTH_SCOPES = os.environ.get("OAUTH_SCOPES") +OPENID_PROVIDER_URL = os.environ.get("OPENID_PROVIDER_URL") + + +def attach_oauth(app: fastapi.FastAPI): + try: + from starlette.middleware.sessions import SessionMiddleware + except 
ImportError as e: + raise ImportError( + "Cannot initialize OAuth to due a missing library. Please run `pip install gradio[oauth]` or add " + "`gradio[oauth]` to your requirements.txt file in order to install the required dependencies." + ) from e + + # Add `/login/huggingface`, `/login/callback` and `/logout` routes to enable OAuth in the Gradio app. + # If the app is running in a Space, OAuth is enabled normally. Otherwise, we mock the "real" routes to make the + # user log in with a fake user profile - without any calls to hf.co. + if get_space() is not None: + _add_oauth_routes(app) + else: + _add_mocked_oauth_routes(app) + + # Session Middleware requires a secret key to sign the cookies. Let's use a hash + # of the OAuth secret key to make it unique to the Space + updated in case OAuth + # config gets updated. + app.add_middleware( + SessionMiddleware, + secret_key=hashlib.sha256((OAUTH_CLIENT_SECRET or "").encode()).hexdigest(), + same_site="none", + https_only=True, + ) + + +def _add_oauth_routes(app: fastapi.FastAPI) -> None: + """Add OAuth routes to the FastAPI app (login, callback handler and logout).""" + try: + from authlib.integrations.starlette_client import OAuth + except ImportError as e: + raise ImportError( + "Cannot initialize OAuth to due a missing library. Please run `pip install gradio[oauth]` or add " + "`gradio[oauth]` to your requirements.txt file in order to install the required dependencies." + ) from e + + # Check environment variables + msg = ( + "OAuth is required but {} environment variable is not set. Make sure you've enabled OAuth in your Space by" + " setting `hf_oauth: true` in the Space metadata." 
+ ) + if OAUTH_CLIENT_ID is None: + raise ValueError(msg.format("OAUTH_CLIENT_ID")) + if OAUTH_CLIENT_SECRET is None: + raise ValueError(msg.format("OAUTH_CLIENT_SECRET")) + if OAUTH_SCOPES is None: + raise ValueError(msg.format("OAUTH_SCOPES")) + if OPENID_PROVIDER_URL is None: + raise ValueError(msg.format("OPENID_PROVIDER_URL")) + + # Register OAuth server + oauth = OAuth() + oauth.register( + name="huggingface", + client_id=OAUTH_CLIENT_ID, + client_secret=OAUTH_CLIENT_SECRET, + client_kwargs={"scope": OAUTH_SCOPES}, + server_metadata_url=OPENID_PROVIDER_URL + "/.well-known/openid-configuration", + ) + + # Define OAuth routes + @app.get("/login/huggingface") + async def oauth_login(request: fastapi.Request): + """Endpoint that redirects to HF OAuth page.""" + redirect_uri = str(request.url_for("oauth_redirect_callback")) + if ".hf.space" in redirect_uri: + # In Space, FastAPI redirect as http but we want https + redirect_uri = redirect_uri.replace("http://", "https://") + return await oauth.huggingface.authorize_redirect(request, redirect_uri) + + @app.get("/login/callback") + async def oauth_redirect_callback(request: fastapi.Request) -> RedirectResponse: + """Endpoint that handles the OAuth callback.""" + token = await oauth.huggingface.authorize_access_token(request) + request.session["oauth_profile"] = token["userinfo"] + request.session["oauth_token"] = token + return RedirectResponse("/") + + @app.get("/logout") + async def oauth_logout(request: fastapi.Request) -> RedirectResponse: + """Endpoint that logs out the user (e.g. delete cookie session).""" + request.session.pop("oauth_profile", None) + request.session.pop("oauth_token", None) + return RedirectResponse("/") + + +def _add_mocked_oauth_routes(app: fastapi.FastAPI) -> None: + """Add fake oauth routes if Gradio is run locally and OAuth is enabled. + + Clicking on a gr.LoginButton will have the same behavior as in a Space (i.e. 
gets redirected in a new tab) but + instead of authenticating with HF, a mocked user profile is added to the session. + """ + warnings.warn( + "Gradio does not support OAuth features outside of a Space environment. " + "To help you debug your app locally, the login and logout buttons are mocked with a fake user profile." + ) + + # Define OAuth routes + @app.get("/login/huggingface") + async def oauth_login(request: fastapi.Request): + """Fake endpoint that redirects to HF OAuth page.""" + return RedirectResponse("/login/callback") + + @app.get("/login/callback") + async def oauth_redirect_callback(request: fastapi.Request) -> RedirectResponse: + """Endpoint that handles the OAuth callback.""" + request.session["oauth_profile"] = MOCKED_OAUTH_TOKEN["userinfo"] + request.session["oauth_token"] = MOCKED_OAUTH_TOKEN + return RedirectResponse("/") + + @app.get("/logout") + async def oauth_logout(request: fastapi.Request) -> RedirectResponse: + """Endpoint that logs out the user (e.g. delete cookie session).""" + request.session.pop("oauth_profile", None) + request.session.pop("oauth_token", None) + return RedirectResponse("/") + + +class OAuthProfile(typing.Dict): + """ + A Gradio OAuthProfile object that can be used to inject the profile of a user in a + function. If a function expects `OAuthProfile` or `Optional[OAuthProfile]` as input, + the value will be injected from the FastAPI session if the user is logged in. If the + user is not logged in and the function expects `OAuthProfile`, an error will be + raised. + + Example: + import gradio as gr + from typing import Optional + + + def hello(profile: Optional[gr.OAuthProfile]) -> str: + if profile is None: + return "I don't know you." 
+ return f"Hello {profile.name}" + + + with gr.Blocks() as demo: + gr.LoginButton() + gr.LogoutButton() + gr.Markdown().attach_load_event(hello, None) + """ + + +MOCKED_OAUTH_TOKEN = { + "access_token": "hf_oauth_AAAAAAAAAAAAAAAAAAAAAAAAAA", + "token_type": "bearer", + "expires_in": 3600, + "id_token": "AAAAAAAAAAAAAAAAAAAAAAAAAA", + "scope": "openid profile", + "expires_at": 1691676444, + "userinfo": { + "sub": "11111111111111111111111", + "name": "Fake Gradio User", + "preferred_username": "FakeGradioUser", + "profile": "https://huggingface.co/FakeGradioUser", + "picture": "https://huggingface.co/front/assets/huggingface_logo-noborder.svg", + "website": "", + "aud": "00000000-0000-0000-0000-000000000000", + "auth_time": 1691672844, + "nonce": "aaaaaaaaaaaaaaaaaaa", + "iat": 1691672844, + "exp": 1691676444, + "iss": "https://huggingface.co", + }, +} diff --git a/testbed/gradio-app__gradio/gradio/outputs.py b/testbed/gradio-app__gradio/gradio/outputs.py new file mode 100644 index 0000000000000000000000000000000000000000..b6d2d20c8f5ecc18e4efda53e1882055332d0756 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/outputs.py @@ -0,0 +1,313 @@ +# type: ignore +""" +This module defines various classes that can serve as the `output` to an interface. Each class must inherit from +`OutputComponent`, and each class must define a path to its template. All of the subclasses of `OutputComponent` are +automatically added to a registry, which allows them to be easily referenced in other parts of the code. 
+""" + +from __future__ import annotations + +from typing import Optional + +from gradio import components +from gradio.deprecation import warn_deprecation + + +def warn_outputs_deprecation(): + warn_deprecation( + "Usage of gradio.outputs is deprecated, and will not be supported in the future, " + "please import your components from gradio.components", + ) + + +class Textbox(components.Textbox): + def __init__( + self, + type: str = "text", + label: Optional[str] = None, + ): + warn_outputs_deprecation() + super().__init__(label=label, type=type) + + +class Image(components.Image): + """ + Component displays an output image. + Output type: Union[numpy.array, PIL.Image, str, matplotlib.pyplot, Tuple[Union[numpy.array, PIL.Image, str], List[Tuple[str, float, float, float, float]]]] + """ + + def __init__( + self, type: str = "auto", plot: bool = False, label: Optional[str] = None + ): + """ + Parameters: + type (str): Type of value to be passed to component. "numpy" expects a numpy array with shape (height, width, 3), "pil" expects a PIL image object, "file" expects a file path to the saved image or a remote URL, "plot" expects a matplotlib.pyplot object, "auto" detects return type. + plot (bool): DEPRECATED. Whether to expect a plot to be returned by the function. + label (str): component name in interface. + """ + warn_outputs_deprecation() + if plot: + type = "plot" + super().__init__(type=type, label=label) + + +class Video(components.Video): + """ + Used for video output. + Output type: filepath + """ + + def __init__(self, type: Optional[str] = None, label: Optional[str] = None): + """ + Parameters: + type (str): Type of video format to be passed to component, such as 'avi' or 'mp4'. Use 'mp4' to ensure browser playability. If set to None, video will keep returned format. + label (str): component name in interface. 
+ """ + warn_outputs_deprecation() + super().__init__(format=type, label=label) + + +class Audio(components.Audio): + """ + Creates an audio player that plays the output audio. + Output type: Union[Tuple[int, numpy.array], str] + """ + + def __init__(self, type: str = "auto", label: Optional[str] = None): + """ + Parameters: + type (str): Type of value to be passed to component. "numpy" returns a 2-set tuple with an integer sample_rate and the data as 16-bit int numpy.array of shape (samples, 2), "file" returns a temporary file path to the saved wav audio file, "auto" detects return type. + label (str): component name in interface. + """ + warn_outputs_deprecation() + super().__init__(type=type, label=label) + + +class File(components.File): + """ + Used for file output. + Output type: Union[file-like, str] + """ + + def __init__(self, label: Optional[str] = None): + """ + Parameters: + label (str): component name in interface. + """ + warn_outputs_deprecation() + super().__init__(label=label) + + +class Dataframe(components.Dataframe): + """ + Component displays 2D output through a spreadsheet interface. + Output type: Union[pandas.DataFrame, numpy.array, List[Union[str, float]], List[List[Union[str, float]]]] + """ + + def __init__( + self, + headers: Optional[list[str]] = None, + max_rows: Optional[int] = 20, + max_cols: Optional[int] = None, + overflow_row_behaviour: str = "paginate", + type: str = "auto", + label: Optional[str] = None, + ): + """ + Parameters: + headers (List[str]): Header names to dataframe. Only applicable if type is "numpy" or "array". + max_rows (int): Maximum number of rows to display at once. Set to None for infinite. + max_cols (int): Maximum number of columns to display at once. Set to None for infinite. + overflow_row_behaviour (str): If set to "paginate", will create pages for overflow rows. If set to "show_ends", will show initial and final rows and truncate middle rows. + type (str): Type of value to be passed to component. 
"pandas" for pandas dataframe, "numpy" for numpy array, or "array" for Python array, "auto" detects return type. + label (str): component name in interface. + """ + warn_outputs_deprecation() + super().__init__( + headers=headers, + type=type, + label=label, + max_rows=max_rows, + max_cols=max_cols, + overflow_row_behaviour=overflow_row_behaviour, + ) + + +class Timeseries(components.Timeseries): + """ + Component accepts pandas.DataFrame. + Output type: pandas.DataFrame + """ + + def __init__( + self, x: str = None, y: str | list[str] = None, label: Optional[str] = None + ): + """ + Parameters: + x (str): Column name of x (time) series. None if csv has no headers, in which case first column is x series. + y (Union[str, List[str]]): Column name of y series, or list of column names if multiple series. None if csv has no headers, in which case every column after first is a y series. + label (str): component name in interface. + """ + warn_outputs_deprecation() + super().__init__(x=x, y=y, label=label) + + +class State(components.State): + """ + Special hidden component that stores state across runs of the interface. + Output type: Any + """ + + def __init__(self, label: Optional[str] = None): + """ + Parameters: + label (str): component name in interface (not used). + """ + warn_outputs_deprecation() + super().__init__(label=label) + + +class Label(components.Label): + """ + Component outputs a classification label, along with confidence scores of top categories if provided. Confidence scores are represented as a dictionary mapping labels to scores between 0 and 1. + Output type: Union[Dict[str, float], str, int, float] + """ + + def __init__( + self, + num_top_classes: Optional[int] = None, + type: str = "auto", + label: Optional[str] = None, + ): + """ + Parameters: + num_top_classes (int): number of most confident classes to show. + type (str): Type of value to be passed to component. 
"value" expects a single out label, "confidences" expects a dictionary mapping labels to confidence scores, "auto" detects return type. + label (str): component name in interface. + """ + warn_outputs_deprecation() + super().__init__(num_top_classes=num_top_classes, type=type, label=label) + + +class KeyValues: + """ + Component displays a table representing values for multiple fields. + Output type: Union[Dict, List[Tuple[str, Union[str, int, float]]]] + """ + + def __init__(self, value: str = " ", *, label: Optional[str] = None, **kwargs): + """ + Parameters: + value (str): IGNORED + label (str): component name in interface. + """ + raise DeprecationWarning( + "The KeyValues component is deprecated. Please use the DataFrame or JSON " + "components instead." + ) + + +class HighlightedText(components.HighlightedText): + """ + Component creates text that contains spans that are highlighted by category or numerical value. + Output is represent as a list of Tuple pairs, where the first element represents the span of text represented by the tuple, and the second element represents the category or value of the text. + Output type: List[Tuple[str, Union[float, str]]] + """ + + def __init__( + self, + color_map: dict[str, str] = None, + label: Optional[str] = None, + show_legend: bool = False, + ): + """ + Parameters: + color_map (Dict[str, str]): Map between category and respective colors + label (str): component name in interface. + show_legend (bool): whether to show span categories in a separate legend or inline. + """ + warn_outputs_deprecation() + super().__init__(color_map=color_map, label=label, show_legend=show_legend) + + +class JSON(components.JSON): + """ + Used for JSON output. Expects a JSON string or a Python object that is JSON serializable. + Output type: Union[str, Any] + """ + + def __init__(self, label: Optional[str] = None): + """ + Parameters: + label (str): component name in interface. 
+ """ + warn_outputs_deprecation() + super().__init__(label=label) + + +class HTML(components.HTML): + """ + Used for HTML output. Expects an HTML valid string. + Output type: str + """ + + def __init__(self, label: Optional[str] = None): + """ + Parameters: + label (str): component name in interface. + """ + super().__init__(label=label) + + +class Carousel(components.Carousel): + """ + Component displays a set of output components that can be scrolled through. + """ + + def __init__( + self, + components: components.Component | list[components.Component], + label: Optional[str] = None, + ): + """ + Parameters: + components (Union[List[Component], Component]): Classes of component(s) that will be scrolled through. + label (str): component name in interface. + """ + warn_outputs_deprecation() + super().__init__(components=components, label=label) + + +class Chatbot(components.Chatbot): + """ + Component displays a chatbot output showing both user submitted messages and responses + Output type: List[Tuple[str, str]] + """ + + def __init__(self, label: Optional[str] = None): + """ + Parameters: + label (str): component name in interface (not used). + """ + warn_outputs_deprecation() + super().__init__(label=label) + + +class Image3D(components.Model3D): + """ + Used for 3D image model output. + Input type: File object of type (.obj, glb, or .gltf) + """ + + def __init__( + self, + clear_color=None, + label: Optional[str] = None, + ): + """ + Parameters: + label (str): component name in interface. + optional (bool): If True, the interface can be submitted with no uploaded image, in which case the input value is None. 
+ """ + warn_outputs_deprecation() + super().__init__(clear_color=clear_color, label=label) diff --git a/testbed/gradio-app__gradio/gradio/package.json b/testbed/gradio-app__gradio/gradio/package.json new file mode 100644 index 0000000000000000000000000000000000000000..c971f4f9e0d50703589c06449b676c465865ebf1 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/package.json @@ -0,0 +1,6 @@ +{ + "name": "gradio", + "version": "3.45.1", + "description": "", + "python": "true" +} diff --git a/testbed/gradio-app__gradio/gradio/pipelines.py b/testbed/gradio-app__gradio/gradio/pipelines.py new file mode 100644 index 0000000000000000000000000000000000000000..cb088c4d87efa6b61deb35f61bc8c2cceb614f19 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/pipelines.py @@ -0,0 +1,234 @@ +"""This module should not be used directly as its API is subject to change. Instead, +please use the `gr.Interface.from_pipeline()` function.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from gradio import components + +if TYPE_CHECKING: # Only import for type checking (is False at runtime). + from transformers import pipelines + + +def load_from_pipeline(pipeline: pipelines.base.Pipeline) -> dict: + """ + Gets the appropriate Interface kwargs for a given Hugging Face transformers.Pipeline. + pipeline (transformers.Pipeline): the transformers.Pipeline from which to create an interface + Returns: + (dict): a dictionary of kwargs that can be used to construct an Interface object + """ + try: + import transformers + from transformers import pipelines + except ImportError as ie: + raise ImportError( + "transformers not installed. Please try `pip install transformers`" + ) from ie + if not isinstance(pipeline, pipelines.base.Pipeline): + raise ValueError("pipeline must be a transformers.Pipeline") + + # Handle the different pipelines. The has_attr() checks to make sure the pipeline exists in the + # version of the transformers library that the user has installed. 
+ if hasattr(transformers, "AudioClassificationPipeline") and isinstance( + pipeline, pipelines.audio_classification.AudioClassificationPipeline + ): + pipeline_info = { + "inputs": components.Audio( + source="microphone", + type="filepath", + label="Input", + render=False, + ), + "outputs": components.Label(label="Class", render=False), + "preprocess": lambda i: {"inputs": i}, + "postprocess": lambda r: {i["label"].split(", ")[0]: i["score"] for i in r}, + } + elif hasattr(transformers, "AutomaticSpeechRecognitionPipeline") and isinstance( + pipeline, + pipelines.automatic_speech_recognition.AutomaticSpeechRecognitionPipeline, + ): + pipeline_info = { + "inputs": components.Audio( + source="microphone", type="filepath", label="Input", render=False + ), + "outputs": components.Textbox(label="Output", render=False), + "preprocess": lambda i: {"inputs": i}, + "postprocess": lambda r: r["text"], + } + elif hasattr(transformers, "FeatureExtractionPipeline") and isinstance( + pipeline, pipelines.feature_extraction.FeatureExtractionPipeline + ): + pipeline_info = { + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Dataframe(label="Output", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: r[0], + } + elif hasattr(transformers, "FillMaskPipeline") and isinstance( + pipeline, pipelines.fill_mask.FillMaskPipeline + ): + pipeline_info = { + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Label(label="Classification", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: {i["token_str"]: i["score"] for i in r}, + } + elif hasattr(transformers, "ImageClassificationPipeline") and isinstance( + pipeline, pipelines.image_classification.ImageClassificationPipeline + ): + pipeline_info = { + "inputs": components.Image( + type="filepath", label="Input Image", render=False + ), + "outputs": components.Label(label="Classification", render=False), + 
"preprocess": lambda i: {"images": i}, + "postprocess": lambda r: {i["label"].split(", ")[0]: i["score"] for i in r}, + } + elif hasattr(transformers, "QuestionAnsweringPipeline") and isinstance( + pipeline, pipelines.question_answering.QuestionAnsweringPipeline + ): + pipeline_info = { + "inputs": [ + components.Textbox(lines=7, label="Context", render=False), + components.Textbox(label="Question", render=False), + ], + "outputs": [ + components.Textbox(label="Answer", render=False), + components.Label(label="Score", render=False), + ], + "preprocess": lambda c, q: {"context": c, "question": q}, + "postprocess": lambda r: (r["answer"], r["score"]), + } + elif hasattr(transformers, "SummarizationPipeline") and isinstance( + pipeline, pipelines.text2text_generation.SummarizationPipeline + ): + pipeline_info = { + "inputs": components.Textbox(lines=7, label="Input", render=False), + "outputs": components.Textbox(label="Summary", render=False), + "preprocess": lambda x: {"inputs": x}, + "postprocess": lambda r: r[0]["summary_text"], + } + elif hasattr(transformers, "TextClassificationPipeline") and isinstance( + pipeline, pipelines.text_classification.TextClassificationPipeline + ): + pipeline_info = { + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Label(label="Classification", render=False), + "preprocess": lambda x: [x], + "postprocess": lambda r: {i["label"].split(", ")[0]: i["score"] for i in r}, + } + elif hasattr(transformers, "TextGenerationPipeline") and isinstance( + pipeline, pipelines.text_generation.TextGenerationPipeline + ): + pipeline_info = { + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Textbox(label="Output", render=False), + "preprocess": lambda x: {"text_inputs": x}, + "postprocess": lambda r: r[0]["generated_text"], + } + elif hasattr(transformers, "TranslationPipeline") and isinstance( + pipeline, pipelines.text2text_generation.TranslationPipeline + ): + pipeline_info = 
{ + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Textbox(label="Translation", render=False), + "preprocess": lambda x: [x], + "postprocess": lambda r: r[0]["translation_text"], + } + elif hasattr(transformers, "Text2TextGenerationPipeline") and isinstance( + pipeline, pipelines.text2text_generation.Text2TextGenerationPipeline + ): + pipeline_info = { + "inputs": components.Textbox(label="Input", render=False), + "outputs": components.Textbox(label="Generated Text", render=False), + "preprocess": lambda x: [x], + "postprocess": lambda r: r[0]["generated_text"], + } + elif hasattr(transformers, "ZeroShotClassificationPipeline") and isinstance( + pipeline, pipelines.zero_shot_classification.ZeroShotClassificationPipeline + ): + pipeline_info = { + "inputs": [ + components.Textbox(label="Input", render=False), + components.Textbox( + label="Possible class names (" "comma-separated)", render=False + ), + components.Checkbox(label="Allow multiple true classes", render=False), + ], + "outputs": components.Label(label="Classification", render=False), + "preprocess": lambda i, c, m: { + "sequences": i, + "candidate_labels": c, + "multi_label": m, + }, + "postprocess": lambda r: { + r["labels"][i]: r["scores"][i] for i in range(len(r["labels"])) + }, + } + elif hasattr(transformers, "DocumentQuestionAnsweringPipeline") and isinstance( + pipeline, + pipelines.document_question_answering.DocumentQuestionAnsweringPipeline, # type: ignore + ): + pipeline_info = { + "inputs": [ + components.Image(type="filepath", label="Input Document", render=False), + components.Textbox(label="Question", render=False), + ], + "outputs": components.Label(label="Label", render=False), + "preprocess": lambda img, q: {"image": img, "question": q}, + "postprocess": lambda r: {i["answer"]: i["score"] for i in r}, + } + elif hasattr(transformers, "VisualQuestionAnsweringPipeline") and isinstance( + pipeline, 
pipelines.visual_question_answering.VisualQuestionAnsweringPipeline + ): + pipeline_info = { + "inputs": [ + components.Image(type="filepath", label="Input Image", render=False), + components.Textbox(label="Question", render=False), + ], + "outputs": components.Label(label="Score", render=False), + "preprocess": lambda img, q: {"image": img, "question": q}, + "postprocess": lambda r: {i["answer"]: i["score"] for i in r}, + } + elif hasattr(transformers, "ImageToTextPipeline") and isinstance( + pipeline, pipelines.image_to_text.ImageToTextPipeline # type: ignore + ): + pipeline_info = { + "inputs": components.Image( + type="filepath", label="Input Image", render=False + ), + "outputs": components.Textbox(label="Text", render=False), + "preprocess": lambda i: {"images": i}, + "postprocess": lambda r: r[0]["generated_text"], + } + else: + raise ValueError(f"Unsupported pipeline type: {type(pipeline)}") + + # define the function that will be called by the Interface + def fn(*params): + data = pipeline_info["preprocess"](*params) + # special cases that needs to be handled differently + if isinstance( + pipeline, + ( + pipelines.text_classification.TextClassificationPipeline, + pipelines.text2text_generation.Text2TextGenerationPipeline, + pipelines.text2text_generation.TranslationPipeline, + ), + ): + data = pipeline(*data) + else: + data = pipeline(**data) + output = pipeline_info["postprocess"](data) + return output + + interface_info = pipeline_info.copy() + interface_info["fn"] = fn + del interface_info["preprocess"] + del interface_info["postprocess"] + + # define the title/description of the Interface + interface_info["title"] = pipeline.model.__class__.__name__ + + return interface_info diff --git a/testbed/gradio-app__gradio/gradio/processing_utils.py b/testbed/gradio-app__gradio/gradio/processing_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d2fd6292cffdb060017d39d572654f6a58f9ee4f --- /dev/null +++ 
b/testbed/gradio-app__gradio/gradio/processing_utils.py @@ -0,0 +1,546 @@ +from __future__ import annotations + +import base64 +import json +import logging +import os +import shutil +import subprocess +import tempfile +import warnings +from io import BytesIO +from pathlib import Path + +import numpy as np +from gradio_client import utils as client_utils +from PIL import Image, ImageOps, PngImagePlugin + +from gradio import wasm_utils + +if not wasm_utils.IS_WASM: + # TODO: Support ffmpeg on Wasm + from ffmpy import FFmpeg, FFprobe, FFRuntimeError + +with warnings.catch_warnings(): + warnings.simplefilter("ignore") # Ignore pydub warning if ffmpeg is not installed + from pydub import AudioSegment + +log = logging.getLogger(__name__) + +######################### +# GENERAL +######################### + + +def to_binary(x: str | dict) -> bytes: + """Converts a base64 string or dictionary to a binary string that can be sent in a POST.""" + if isinstance(x, dict): + if x.get("data"): + base64str = x["data"] + else: + base64str = client_utils.encode_url_or_file_to_base64(x["name"]) + else: + base64str = x + return base64.b64decode(extract_base64_data(base64str)) + + +def extract_base64_data(x: str) -> str: + """Just extracts the base64 data from a general base64 string.""" + return x.rsplit(",", 1)[-1] + + +######################### +# IMAGE PRE-PROCESSING +######################### + + +def decode_base64_to_image(encoding: str) -> Image.Image: + image_encoded = extract_base64_data(encoding) + img = Image.open(BytesIO(base64.b64decode(image_encoded))) + try: + if hasattr(ImageOps, "exif_transpose"): + img = ImageOps.exif_transpose(img) + except Exception: + log.warning( + "Failed to transpose image %s based on EXIF data.", + img, + exc_info=True, + ) + return img + + +def encode_plot_to_base64(plt): + with BytesIO() as output_bytes: + plt.savefig(output_bytes, format="png") + bytes_data = output_bytes.getvalue() + base64_str = str(base64.b64encode(bytes_data), "utf-8") + 
return "data:image/png;base64," + base64_str + + +def get_pil_metadata(pil_image): + # Copy any text-only metadata + metadata = PngImagePlugin.PngInfo() + for key, value in pil_image.info.items(): + if isinstance(key, str) and isinstance(value, str): + metadata.add_text(key, value) + + return metadata + + +def encode_pil_to_bytes(pil_image, format="png"): + with BytesIO() as output_bytes: + pil_image.save(output_bytes, format, pnginfo=get_pil_metadata(pil_image)) + return output_bytes.getvalue() + + +def encode_pil_to_base64(pil_image): + bytes_data = encode_pil_to_bytes(pil_image) + base64_str = str(base64.b64encode(bytes_data), "utf-8") + return "data:image/png;base64," + base64_str + + +def encode_array_to_base64(image_array): + with BytesIO() as output_bytes: + pil_image = Image.fromarray(_convert(image_array, np.uint8, force_copy=False)) + pil_image.save(output_bytes, "PNG") + bytes_data = output_bytes.getvalue() + base64_str = str(base64.b64encode(bytes_data), "utf-8") + return "data:image/png;base64," + base64_str + + +def resize_and_crop(img, size, crop_type="center"): + """ + Resize and crop an image to fit the specified size. + args: + size: `(width, height)` tuple. Pass `None` for either width or height + to only crop and resize the other. + crop_type: can be 'top', 'middle' or 'bottom', depending on this + value, the image will cropped getting the 'top/left', 'middle' or + 'bottom/right' of the image to fit the size. + raises: + ValueError: if an invalid `crop_type` is provided. 
+ """ + if crop_type == "top": + center = (0, 0) + elif crop_type == "center": + center = (0.5, 0.5) + else: + raise ValueError + + resize = list(size) + if size[0] is None: + resize[0] = img.size[0] + if size[1] is None: + resize[1] = img.size[1] + return ImageOps.fit(img, resize, centering=center) # type: ignore + + +################## +# Audio +################## + + +def audio_from_file(filename, crop_min=0, crop_max=100): + try: + audio = AudioSegment.from_file(filename) + except FileNotFoundError as e: + isfile = Path(filename).is_file() + msg = ( + f"Cannot load audio from file: `{'ffprobe' if isfile else filename}` not found." + + " Please install `ffmpeg` in your system to use non-WAV audio file formats" + " and make sure `ffprobe` is in your PATH." + if isfile + else "" + ) + raise RuntimeError(msg) from e + if crop_min != 0 or crop_max != 100: + audio_start = len(audio) * crop_min / 100 + audio_end = len(audio) * crop_max / 100 + audio = audio[audio_start:audio_end] + data = np.array(audio.get_array_of_samples()) + if audio.channels > 1: + data = data.reshape(-1, audio.channels) + return audio.frame_rate, data + + +def audio_to_file(sample_rate, data, filename, format="wav"): + if format == "wav": + data = convert_to_16_bit_wav(data) + audio = AudioSegment( + data.tobytes(), + frame_rate=sample_rate, + sample_width=data.dtype.itemsize, + channels=(1 if len(data.shape) == 1 else data.shape[1]), + ) + file = audio.export(filename, format=format) + file.close() # type: ignore + + +def convert_to_16_bit_wav(data): + # Based on: https://docs.scipy.org/doc/scipy/reference/generated/scipy.io.wavfile.write.html + warning = "Trying to convert audio automatically from {} to 16-bit int format." 
+ if data.dtype in [np.float64, np.float32, np.float16]: + warnings.warn(warning.format(data.dtype)) + data = data / np.abs(data).max() + data = data * 32767 + data = data.astype(np.int16) + elif data.dtype == np.int32: + warnings.warn(warning.format(data.dtype)) + data = data / 65538 + data = data.astype(np.int16) + elif data.dtype == np.int16: + pass + elif data.dtype == np.uint16: + warnings.warn(warning.format(data.dtype)) + data = data - 32768 + data = data.astype(np.int16) + elif data.dtype == np.uint8: + warnings.warn(warning.format(data.dtype)) + data = data * 257 - 32768 + data = data.astype(np.int16) + else: + raise ValueError( + "Audio data cannot be converted automatically from " + f"{data.dtype} to 16-bit int format." + ) + return data + + +################## +# OUTPUT +################## + + +def _convert(image, dtype, force_copy=False, uniform=False): + """ + Adapted from: https://github.com/scikit-image/scikit-image/blob/main/skimage/util/dtype.py#L510-L531 + + Convert an image to the requested data-type. + Warnings are issued in case of precision loss, or when negative values + are clipped during conversion to unsigned integer types (sign loss). + Floating point values are expected to be normalized and will be clipped + to the range [0.0, 1.0] or [-1.0, 1.0] when converting to unsigned or + signed integers respectively. + Numbers are not shifted to the negative side when converting from + unsigned to signed integer types. Negative values will be clipped when + converting to unsigned integers. + Parameters + ---------- + image : ndarray + Input image. + dtype : dtype + Target data-type. + force_copy : bool, optional + Force a copy of the data, irrespective of its current dtype. + uniform : bool, optional + Uniformly quantize the floating point range to the integer range. + By default (uniform=False) floating point values are scaled and + rounded to the nearest integers, which minimizes back and forth + conversion errors. + .. 
versionchanged :: 0.15 + ``_convert`` no longer warns about possible precision or sign + information loss. See discussions on these warnings at: + https://github.com/scikit-image/scikit-image/issues/2602 + https://github.com/scikit-image/scikit-image/issues/543#issuecomment-208202228 + https://github.com/scikit-image/scikit-image/pull/3575 + References + ---------- + .. [1] DirectX data conversion rules. + https://msdn.microsoft.com/en-us/library/windows/desktop/dd607323%28v=vs.85%29.aspx + .. [2] Data Conversions. In "OpenGL ES 2.0 Specification v2.0.25", + pp 7-8. Khronos Group, 2010. + .. [3] Proper treatment of pixels as integers. A.W. Paeth. + In "Graphics Gems I", pp 249-256. Morgan Kaufmann, 1990. + .. [4] Dirty Pixels. J. Blinn. In "Jim Blinn's corner: Dirty Pixels", + pp 47-57. Morgan Kaufmann, 1998. + """ + dtype_range = { + bool: (False, True), + np.bool_: (False, True), + np.bool8: (False, True), # type: ignore + float: (-1, 1), + np.float_: (-1, 1), + np.float16: (-1, 1), + np.float32: (-1, 1), + np.float64: (-1, 1), + } + + def _dtype_itemsize(itemsize, *dtypes): + """Return first of `dtypes` with itemsize greater than `itemsize` + Parameters + ---------- + itemsize: int + The data type object element size. + Other Parameters + ---------------- + *dtypes: + Any Object accepted by `np.dtype` to be converted to a data + type object + Returns + ------- + dtype: data type object + First of `dtypes` with itemsize greater than `itemsize`. + """ + return next(dt for dt in dtypes if np.dtype(dt).itemsize >= itemsize) + + def _dtype_bits(kind, bits, itemsize=1): + """Return dtype of `kind` that can store a `bits` wide unsigned int + Parameters: + kind: str + Data type kind. + bits: int + Desired number of bits. + itemsize: int + The data type object element size. 
+ Returns + ------- + dtype: data type object + Data type of `kind` that can store a `bits` wide unsigned int + """ + + s = next( + i + for i in (itemsize,) + (2, 4, 8) + if bits < (i * 8) or (bits == (i * 8) and kind == "u") + ) + + return np.dtype(kind + str(s)) + + def _scale(a, n, m, copy=True): + """Scale an array of unsigned/positive integers from `n` to `m` bits. + Numbers can be represented exactly only if `m` is a multiple of `n`. + Parameters + ---------- + a : ndarray + Input image array. + n : int + Number of bits currently used to encode the values in `a`. + m : int + Desired number of bits to encode the values in `out`. + copy : bool, optional + If True, allocates and returns new array. Otherwise, modifies + `a` in place. + Returns + ------- + out : array + Output image array. Has the same kind as `a`. + """ + kind = a.dtype.kind + if n > m and a.max() < 2**m: + return a.astype(_dtype_bits(kind, m)) + elif n == m: + return a.copy() if copy else a + elif n > m: + # downscale with precision loss + if copy: + b = np.empty(a.shape, _dtype_bits(kind, m)) + np.floor_divide(a, 2 ** (n - m), out=b, dtype=a.dtype, casting="unsafe") + return b + else: + a //= 2 ** (n - m) + return a + elif m % n == 0: + # exact upscale to a multiple of `n` bits + if copy: + b = np.empty(a.shape, _dtype_bits(kind, m)) + np.multiply(a, (2**m - 1) // (2**n - 1), out=b, dtype=b.dtype) + return b + else: + a = a.astype(_dtype_bits(kind, m, a.dtype.itemsize), copy=False) + a *= (2**m - 1) // (2**n - 1) + return a + else: + # upscale to a multiple of `n` bits, + # then downscale with precision loss + o = (m // n + 1) * n + if copy: + b = np.empty(a.shape, _dtype_bits(kind, o)) + np.multiply(a, (2**o - 1) // (2**n - 1), out=b, dtype=b.dtype) + b //= 2 ** (o - m) + return b + else: + a = a.astype(_dtype_bits(kind, o, a.dtype.itemsize), copy=False) + a *= (2**o - 1) // (2**n - 1) + a //= 2 ** (o - m) + return a + + image = np.asarray(image) + dtypeobj_in = image.dtype + dtypeobj_out = 
np.dtype("float64") if dtype is np.floating else np.dtype(dtype) + dtype_in = dtypeobj_in.type + dtype_out = dtypeobj_out.type + kind_in = dtypeobj_in.kind + kind_out = dtypeobj_out.kind + itemsize_in = dtypeobj_in.itemsize + itemsize_out = dtypeobj_out.itemsize + + # Below, we do an `issubdtype` check. Its purpose is to find out + # whether we can get away without doing any image conversion. This happens + # when: + # + # - the output and input dtypes are the same or + # - when the output is specified as a type, and the input dtype + # is a subclass of that type (e.g. `np.floating` will allow + # `float32` and `float64` arrays through) + + if np.issubdtype(dtype_in, np.obj2sctype(dtype)): + if force_copy: + image = image.copy() + return image + + if kind_in in "ui": + imin_in = np.iinfo(dtype_in).min + imax_in = np.iinfo(dtype_in).max + if kind_out in "ui": + imin_out = np.iinfo(dtype_out).min # type: ignore + imax_out = np.iinfo(dtype_out).max # type: ignore + + # any -> binary + if kind_out == "b": + return image > dtype_in(dtype_range[dtype_in][1] / 2) + + # binary -> any + if kind_in == "b": + result = image.astype(dtype_out) + if kind_out != "f": + result *= dtype_out(dtype_range[dtype_out][1]) + return result + + # float -> any + if kind_in == "f": + if kind_out == "f": + # float -> float + return image.astype(dtype_out) + + if np.min(image) < -1.0 or np.max(image) > 1.0: + raise ValueError("Images of type float must be between -1 and 1.") + # floating point -> integer + # use float type that can represent output integer type + computation_type = _dtype_itemsize( + itemsize_out, dtype_in, np.float32, np.float64 + ) + + if not uniform: + if kind_out == "u": + image_out = np.multiply(image, imax_out, dtype=computation_type) # type: ignore + else: + image_out = np.multiply( + image, (imax_out - imin_out) / 2, dtype=computation_type # type: ignore + ) + image_out -= 1.0 / 2.0 + np.rint(image_out, out=image_out) + np.clip(image_out, imin_out, imax_out, 
out=image_out) # type: ignore + elif kind_out == "u": + image_out = np.multiply(image, imax_out + 1, dtype=computation_type) # type: ignore + np.clip(image_out, 0, imax_out, out=image_out) # type: ignore + else: + image_out = np.multiply( + image, (imax_out - imin_out + 1.0) / 2.0, dtype=computation_type # type: ignore + ) + np.floor(image_out, out=image_out) + np.clip(image_out, imin_out, imax_out, out=image_out) # type: ignore + return image_out.astype(dtype_out) + + # signed/unsigned int -> float + if kind_out == "f": + # use float type that can exactly represent input integers + computation_type = _dtype_itemsize( + itemsize_in, dtype_out, np.float32, np.float64 + ) + + if kind_in == "u": + # using np.divide or np.multiply doesn't copy the data + # until the computation time + image = np.multiply(image, 1.0 / imax_in, dtype=computation_type) # type: ignore + # DirectX uses this conversion also for signed ints + # if imin_in: + # np.maximum(image, -1.0, out=image) + else: + image = np.add(image, 0.5, dtype=computation_type) + image *= 2 / (imax_in - imin_in) # type: ignore + + return np.asarray(image, dtype_out) + + # unsigned int -> signed/unsigned int + if kind_in == "u": + if kind_out == "i": + # unsigned int -> signed int + image = _scale(image, 8 * itemsize_in, 8 * itemsize_out - 1) + return image.view(dtype_out) + else: + # unsigned int -> unsigned int + return _scale(image, 8 * itemsize_in, 8 * itemsize_out) + + # signed int -> unsigned int + if kind_out == "u": + image = _scale(image, 8 * itemsize_in - 1, 8 * itemsize_out) + result = np.empty(image.shape, dtype_out) + np.maximum(image, 0, out=result, dtype=image.dtype, casting="unsafe") + return result + + # signed int -> signed int + if itemsize_in > itemsize_out: + return _scale(image, 8 * itemsize_in - 1, 8 * itemsize_out - 1) + + image = image.astype(_dtype_bits("i", itemsize_out * 8)) + image -= imin_in # type: ignore + image = _scale(image, 8 * itemsize_in, 8 * itemsize_out, copy=False) + image += 
imin_out # type: ignore + return image.astype(dtype_out) + + +def ffmpeg_installed() -> bool: + if wasm_utils.IS_WASM: + # TODO: Support ffmpeg in WASM + return False + + return shutil.which("ffmpeg") is not None + + +def video_is_playable(video_filepath: str) -> bool: + """Determines if a video is playable in the browser. + + A video is playable if it has a playable container and codec. + .mp4 -> h264 + .webm -> vp9 + .ogg -> theora + """ + try: + container = Path(video_filepath).suffix.lower() + probe = FFprobe( + global_options="-show_format -show_streams -select_streams v -print_format json", + inputs={video_filepath: None}, + ) + output = probe.run(stderr=subprocess.PIPE, stdout=subprocess.PIPE) + output = json.loads(output[0]) + video_codec = output["streams"][0]["codec_name"] + return (container, video_codec) in [ + (".mp4", "h264"), + (".ogg", "theora"), + (".webm", "vp9"), + ] + # If anything goes wrong, assume the video can be played to not convert downstream + except (FFRuntimeError, IndexError, KeyError): + return True + + +def convert_video_to_playable_mp4(video_path: str) -> str: + """Convert the video to mp4. 
If something goes wrong return the original video.""" + try: + with tempfile.NamedTemporaryFile(delete=False) as tmp_file: + output_path = Path(video_path).with_suffix(".mp4") + shutil.copy2(video_path, tmp_file.name) + # ffmpeg will automatically use h264 codec (playable in browser) when converting to mp4 + ff = FFmpeg( + inputs={str(tmp_file.name): None}, + outputs={str(output_path): None}, + global_options="-y -loglevel quiet", + ) + ff.run() + except FFRuntimeError as e: + print(f"Error converting video to browser-playable format {str(e)}") + output_path = video_path + finally: + # Remove temp file + os.remove(tmp_file.name) # type: ignore + return str(output_path) diff --git a/testbed/gradio-app__gradio/gradio/queueing.py b/testbed/gradio-app__gradio/gradio/queueing.py new file mode 100644 index 0000000000000000000000000000000000000000..432f7f8acc8301b586b34a94056dc602f2fcd903 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/queueing.py @@ -0,0 +1,561 @@ +from __future__ import annotations + +import asyncio +import copy +import json +import time +import traceback +from asyncio import TimeoutError as AsyncTimeOutError +from collections import deque +from typing import Any + +import fastapi +from typing_extensions import Literal + +from gradio import route_utils, routes +from gradio.data_classes import ( + Estimation, + LogMessage, + PredictBody, + Progress, + ProgressUnit, +) +from gradio.exceptions import Error +from gradio.helpers import TrackedIterable +from gradio.utils import run_coro_in_background, safe_get_lock, set_task_name + + +class Event: + def __init__( + self, + websocket: fastapi.WebSocket, + session_hash: str, + fn_index: int, + ): + self.websocket = websocket + self.session_hash: str = session_hash + self.fn_index: int = fn_index + self._id = f"{self.session_hash}_{self.fn_index}" + self.data: PredictBody | None = None + self.lost_connection_time: float | None = None + self.username: str | None = None + self.progress: Progress | None = 
None + self.progress_pending: bool = False + self.log_messages: deque[LogMessage] = deque() + + async def disconnect(self, code: int = 1000): + await self.websocket.close(code=code) + + +class Queue: + def __init__( + self, + live_updates: bool, + concurrency_count: int, + update_intervals: float, + max_size: int | None, + blocks_dependencies: list, + ): + self.event_queue: deque[Event] = deque() + self.events_pending_reconnection = [] + self.stopped = False + self.max_thread_count = concurrency_count + self.update_intervals = update_intervals + self.active_jobs: list[None | list[Event]] = [None] * concurrency_count + self.delete_lock = safe_get_lock() + self.server_app = None + self.duration_history_total = 0 + self.duration_history_count = 0 + self.avg_process_time = 0 + self.avg_concurrent_process_time = None + self.queue_duration = 1 + self.live_updates = live_updates + self.sleep_when_free = 0.05 + self.progress_update_sleep_when_free = 0.1 + self.max_size = max_size + self.blocks_dependencies = blocks_dependencies + self.continuous_tasks: list[Event] = [] + + def start(self): + run_coro_in_background(self.start_processing) + run_coro_in_background(self.start_log_and_progress_updates) + if not self.live_updates: + run_coro_in_background(self.notify_clients) + + def close(self): + self.stopped = True + + def resume(self): + self.stopped = False + + def set_server_app(self, app: routes.App): + self.server_app = app + + def get_active_worker_count(self) -> int: + count = 0 + for worker in self.active_jobs: + if worker is not None: + count += 1 + return count + + def get_events_in_batch(self) -> tuple[list[Event] | None, bool]: + if not (self.event_queue): + return None, False + + first_event = self.event_queue.popleft() + events = [first_event] + + event_fn_index = first_event.fn_index + batch = self.blocks_dependencies[event_fn_index]["batch"] + + if batch: + batch_size = self.blocks_dependencies[event_fn_index]["max_batch_size"] + rest_of_batch = [ + event for 
event in self.event_queue if event.fn_index == event_fn_index + ][: batch_size - 1] + events.extend(rest_of_batch) + [self.event_queue.remove(event) for event in rest_of_batch] + + return events, batch + + async def start_processing(self) -> None: + while not self.stopped: + if not self.event_queue: + await asyncio.sleep(self.sleep_when_free) + continue + + if None not in self.active_jobs: + await asyncio.sleep(self.sleep_when_free) + continue + # Using mutex to avoid editing a list in use + async with self.delete_lock: + events, batch = self.get_events_in_batch() + + if events: + self.active_jobs[self.active_jobs.index(None)] = events + task = run_coro_in_background(self.process_events, events, batch) + run_coro_in_background(self.broadcast_live_estimations) + set_task_name(task, events[0].session_hash, events[0].fn_index, batch) + + async def start_log_and_progress_updates(self) -> None: + while not self.stopped: + events = [ + evt for job in self.active_jobs if job is not None for evt in job + ] + self.continuous_tasks + + if len(events) == 0: + await asyncio.sleep(self.progress_update_sleep_when_free) + continue + + for event in events: + if event.progress_pending and event.progress: + event.progress_pending = False + client_awake = await self.send_message(event, event.progress.dict()) + if not client_awake: + await self.clean_event(event) + await self.send_log_updates_for_event(event) + + await asyncio.sleep(self.progress_update_sleep_when_free) + + async def send_log_updates_for_event(self, event: Event) -> None: + while True: + try: + message = event.log_messages.popleft() + except IndexError: + break + client_awake = await self.send_message(event, message.dict()) + if not client_awake: + await self.clean_event(event) + + def set_progress( + self, + event_id: str, + iterables: list[TrackedIterable] | None, + ): + if iterables is None: + return + for job in self.active_jobs: + if job is None: + continue + for evt in job: + if evt._id == event_id: + 
progress_data: list[ProgressUnit] = [] + for iterable in iterables: + progress_unit = ProgressUnit( + index=iterable.index, + length=iterable.length, + unit=iterable.unit, + progress=iterable.progress, + desc=iterable.desc, + ) + progress_data.append(progress_unit) + evt.progress = Progress(progress_data=progress_data) + evt.progress_pending = True + + def log_message( + self, + event_id: str, + log: str, + level: Literal["info", "warning"], + ): + events = [ + evt for job in self.active_jobs if job is not None for evt in job + ] + self.continuous_tasks + for event in events: + if event._id == event_id: + log_message = LogMessage( + log=log, + level=level, + ) + event.log_messages.append(log_message) + + def push(self, event: Event) -> int | None: + """ + Add event to queue, or return None if Queue is full + Parameters: + event: Event to add to Queue + Returns: + rank of submitted Event + """ + queue_len = len(self.event_queue) + if self.max_size is not None and queue_len >= self.max_size: + return None + self.event_queue.append(event) + return queue_len + + async def clean_event(self, event: Event) -> None: + if event in self.event_queue: + async with self.delete_lock: + self.event_queue.remove(event) + + async def broadcast_live_estimations(self) -> None: + """ + Runs 2 functions sequentially instead of concurrently. Otherwise dced clients are tried to get deleted twice. 
+ """ + if self.live_updates: + await self.broadcast_estimations() + + async def gather_event_data(self, event: Event, receive_timeout=60) -> bool: + """ + Gather data for the event + Parameters: + event: the Event to gather data for + receive_timeout: how long to wait for data to be received from frontend + """ + if not event.data: + client_awake = await self.send_message(event, {"msg": "send_data"}) + if not client_awake: + return False + data, client_awake = await self.get_message(event, timeout=receive_timeout) + if not client_awake: + # In the event, we timeout due to large data size + # Let the client know, otherwise will hang + await self.send_message( + event, + { + "msg": "process_completed", + "output": {"error": "Time out uploading data to server"}, + "success": False, + }, + ) + return False + event.data = data + return True + + async def notify_clients(self) -> None: + """ + Notify clients about events statuses in the queue periodically. + """ + while not self.stopped: + await asyncio.sleep(self.update_intervals) + if self.event_queue: + await self.broadcast_estimations() + + async def broadcast_estimations(self) -> None: + estimation = self.get_estimation() + # Send all messages concurrently + await asyncio.gather( + *[ + self.send_estimation(event, estimation, rank) + for rank, event in enumerate(self.event_queue) + ] + ) + + async def send_estimation( + self, event: Event, estimation: Estimation, rank: int + ) -> Estimation: + """ + Send estimation about ETA to the client. 
+ + Parameters: + event: + estimation: + rank: + """ + estimation.rank = rank + + if self.avg_concurrent_process_time is not None: + estimation.rank_eta = ( + estimation.rank * self.avg_concurrent_process_time + + self.avg_process_time + ) + if None not in self.active_jobs: + # Add estimated amount of time for a thread to get empty + estimation.rank_eta += self.avg_concurrent_process_time + client_awake = await self.send_message(event, estimation.dict()) + if not client_awake: + await self.clean_event(event) + return estimation + + def update_estimation(self, duration: float) -> None: + """ + Update estimation by last x element's average duration. + + Parameters: + duration: + """ + self.duration_history_total += duration + self.duration_history_count += 1 + self.avg_process_time = ( + self.duration_history_total / self.duration_history_count + ) + self.avg_concurrent_process_time = self.avg_process_time / min( + self.max_thread_count, self.duration_history_count + ) + self.queue_duration = self.avg_concurrent_process_time * len(self.event_queue) + + def get_estimation(self) -> Estimation: + return Estimation( + queue_size=len(self.event_queue), + avg_event_process_time=self.avg_process_time, + avg_event_concurrent_process_time=self.avg_concurrent_process_time, + queue_eta=self.queue_duration, + ) + + def get_request_params(self, websocket: fastapi.WebSocket) -> dict[str, Any]: + params = { + "url": str(websocket.url), + "headers": dict(websocket.headers), + "query_params": dict(websocket.query_params), + "path_params": dict(websocket.path_params), + "client": {"host": websocket.client.host, "port": websocket.client.port}, # type: ignore + } + try: + params[ + "session" + ] = websocket.session # forward OAuth information if available + except Exception: + pass + return params + + async def call_prediction(self, events: list[Event], batch: bool): + body = events[0].data + assert body is not None, "No event data" + username = events[0].username + body.event_id = 
events[0]._id if not batch else None + try: + body.request = self.get_request_params(events[0].websocket) + except ValueError: + pass + + if batch: + body.data = list(zip(*[event.data.data for event in events if event.data])) + body.request = [ + self.get_request_params(event.websocket) + for event in events + if event.data + ] + body.batched = True + + app = self.server_app + if app is None: + raise Exception("Server app has not been set.") + api_name = "predict" + + fn_index_inferred = route_utils.infer_fn_index( + app=app, api_name=api_name, body=body + ) + + gr_request = route_utils.compile_gr_request( + app=app, + body=body, + fn_index_inferred=fn_index_inferred, + username=username, + request=None, + ) + + try: + output = await route_utils.call_process_api( + app=app, + body=body, + gr_request=gr_request, + fn_index_inferred=fn_index_inferred, + ) + except Exception as error: + show_error = app.get_blocks().show_error or isinstance(error, Error) + traceback.print_exc() + raise Exception(str(error) if show_error else None) from error + + # To emulate the HTTP response from the predict API, + # convert the output to a JSON response string. + # This is done by FastAPI automatically in the HTTP endpoint handlers, + # but we need to do it manually here. + response_class = app.router.default_response_class + if isinstance(response_class, fastapi.datastructures.DefaultPlaceholder): + actual_response_class = response_class.value + else: + actual_response_class = response_class + http_response = actual_response_class( + output + ) # Do the same as https://github.com/tiangolo/fastapi/blob/0.87.0/fastapi/routing.py#L264 + # Also, decode the JSON string to a Python object, emulating the HTTP client behavior e.g. the `json()` method of `httpx`. 
+ response_json = json.loads(http_response.body.decode()) + + return response_json + + async def process_events(self, events: list[Event], batch: bool) -> None: + awake_events: list[Event] = [] + try: + for event in events: + client_awake = await self.gather_event_data(event) + if client_awake: + client_awake = await self.send_message( + event, {"msg": "process_starts"} + ) + if client_awake: + awake_events.append(event) + if not awake_events: + return + begin_time = time.time() + try: + response = await self.call_prediction(awake_events, batch) + err = None + except Exception as e: + response = None + err = e + for event in awake_events: + await self.send_message( + event, + { + "msg": "process_completed", + "output": { + "error": None + if len(e.args) and e.args[0] is None + else str(e) + }, + "success": False, + }, + ) + if response and response.get("is_generating", False): + old_response = response + old_err = err + while response and response.get("is_generating", False): + old_response = response + old_err = err + open_ws = [] + for event in awake_events: + open = await self.send_message( + event, + { + "msg": "process_generating", + "output": old_response, + "success": old_response is not None, + }, + ) + open_ws.append(open) + awake_events = [ + e for e, is_open in zip(awake_events, open_ws) if is_open + ] + if not awake_events: + return + try: + response = await self.call_prediction(awake_events, batch) + err = None + except Exception as e: + response = None + err = e + for event in awake_events: + if response is None: + relevant_response = err + else: + relevant_response = old_response or old_err + await self.send_log_updates_for_event(event) + await self.send_message( + event, + { + "msg": "process_completed", + "output": {"error": str(relevant_response)} + if isinstance(relevant_response, Exception) + else relevant_response, + "success": relevant_response + and not isinstance(relevant_response, Exception), + }, + ) + elif response: + output = 
copy.deepcopy(response) + for e, event in enumerate(awake_events): + if batch and "data" in output: + output["data"] = list(zip(*response.get("data")))[e] + await self.send_log_updates_for_event( + event + ) # clean out pending log updates first + await self.send_message( + event, + { + "msg": "process_completed", + "output": output, + "success": response is not None, + }, + ) + end_time = time.time() + if response is not None: + self.update_estimation(end_time - begin_time) + except Exception as e: + print(e) + finally: + for event in awake_events: + try: + await event.disconnect() + except Exception: + pass + self.active_jobs[self.active_jobs.index(events)] = None + for event in events: + await self.clean_event(event) + # Always reset the state of the iterator + # If the job finished successfully, this has no effect + # If the job is cancelled, this will enable future runs + # to start "from scratch" + await self.reset_iterators(event.session_hash, event.fn_index) + + async def send_message(self, event, data: dict, timeout: float | int = 1) -> bool: + try: + await asyncio.wait_for( + event.websocket.send_json(data=data), timeout=timeout + ) + return True + except Exception: + await self.clean_event(event) + return False + + async def get_message(self, event, timeout=5) -> tuple[PredictBody | None, bool]: + try: + data = await asyncio.wait_for( + event.websocket.receive_json(), timeout=timeout + ) + return PredictBody(**data), True + except AsyncTimeOutError: + await self.clean_event(event) + return None, False + + async def reset_iterators(self, session_hash: str, fn_index: int): + # Do the same thing as the /reset route + app = self.server_app + if app is None: + raise Exception("Server app has not been set.") + if session_hash not in app.iterators: + # Failure, but don't raise an error + return + async with app.lock: + app.iterators[session_hash][fn_index] = None + app.iterators_to_reset[session_hash].add(fn_index) + return diff --git 
a/testbed/gradio-app__gradio/gradio/ranged_response.py b/testbed/gradio-app__gradio/gradio/ranged_response.py new file mode 100644 index 0000000000000000000000000000000000000000..88eb696184e56f683f8feabbf895a1bd6346a667 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/ranged_response.py @@ -0,0 +1,185 @@ +# Taken from https://gist.github.com/kevinastone/a6a62db57577b3f24e8a6865ed311463 +# Context: https://github.com/encode/starlette/pull/1090 +from __future__ import annotations + +import os +import re +import stat +from typing import NamedTuple +from urllib.parse import quote + +import aiofiles +from aiofiles.os import stat as aio_stat +from starlette.datastructures import Headers +from starlette.exceptions import HTTPException +from starlette.responses import Response, guess_type +from starlette.staticfiles import StaticFiles +from starlette.types import Receive, Scope, Send + +RANGE_REGEX = re.compile(r"^bytes=(?P\d+)-(?P\d*)$") + + +class ClosedRange(NamedTuple): + start: int + end: int + + def __len__(self) -> int: + return self.end - self.start + 1 + + def __bool__(self) -> bool: + return len(self) > 0 + + +class OpenRange(NamedTuple): + start: int + end: int | None = None + + def clamp(self, start: int, end: int) -> ClosedRange: + begin = max(self.start, start) + end = min(x for x in (self.end, end) if x) + + begin = min(begin, end) + end = max(begin, end) + + return ClosedRange(begin, end) + + +class RangedFileResponse(Response): + chunk_size = 4096 + + def __init__( + self, + path: str | os.PathLike, + range: OpenRange, + headers: dict[str, str] | None = None, + media_type: str | None = None, + filename: str | None = None, + stat_result: os.stat_result | None = None, + method: str | None = None, + ) -> None: + assert aiofiles is not None, "'aiofiles' must be installed to use FileResponse" + self.path = path + self.range = range + self.filename = filename + self.background = None + self.send_header_only = method is not None and method.upper() == "HEAD" 
+ if media_type is None: + media_type = guess_type(filename or path)[0] or "text/plain" + self.media_type = media_type + self.init_headers(headers or {}) + if self.filename is not None: + content_disposition_filename = quote(self.filename) + if content_disposition_filename != self.filename: + content_disposition = ( + f"attachment; filename*=utf-8''{content_disposition_filename}" + ) + else: + content_disposition = f'attachment; filename="{self.filename}"' + self.headers.setdefault("content-disposition", content_disposition) + self.stat_result = stat_result + + def set_range_headers(self, range: ClosedRange) -> None: + assert self.stat_result + total_length = self.stat_result.st_size + content_length = len(range) + self.headers[ + "content-range" + ] = f"bytes {range.start}-{range.end}/{total_length}" + self.headers["content-length"] = str(content_length) + pass + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if self.stat_result is None: + try: + stat_result = await aio_stat(self.path) + self.stat_result = stat_result + except FileNotFoundError as fnfe: + raise RuntimeError( + f"File at path {self.path} does not exist." 
+ ) from fnfe + else: + mode = stat_result.st_mode + if not stat.S_ISREG(mode): + raise RuntimeError(f"File at path {self.path} is not a file.") + + byte_range = self.range.clamp(0, self.stat_result.st_size) + self.set_range_headers(byte_range) + + async with aiofiles.open(self.path, mode="rb") as file: + await file.seek(byte_range.start) + await send( + { + "type": "http.response.start", + "status": 206, + "headers": self.raw_headers, + } + ) + if self.send_header_only: + await send( + {"type": "http.response.body", "body": b"", "more_body": False} + ) + else: + remaining_bytes = len(byte_range) + + if not byte_range: + await send( + {"type": "http.response.body", "body": b"", "more_body": False} + ) + return + + while remaining_bytes > 0: + chunk_size = min(self.chunk_size, remaining_bytes) + chunk = await file.read(chunk_size) + remaining_bytes -= len(chunk) + await send( + { + "type": "http.response.body", + "body": chunk, + "more_body": remaining_bytes > 0, + } + ) + + +class RangedStaticFiles(StaticFiles): + def file_response( + self, + full_path: str | os.PathLike, + stat_result: os.stat_result, + scope: Scope, + status_code: int = 200, + ) -> Response: + request_headers = Headers(scope=scope) + + if request_headers.get("range"): + response = self.ranged_file_response( + full_path, stat_result=stat_result, scope=scope + ) + else: + response = super().file_response( + full_path, stat_result=stat_result, scope=scope, status_code=status_code + ) + response.headers["accept-ranges"] = "bytes" + return response + + def ranged_file_response( + self, + full_path: str | os.PathLike, + stat_result: os.stat_result, + scope: Scope, + ) -> Response: + method = scope["method"] + request_headers = Headers(scope=scope) + + range_header = request_headers["range"] + + match = RANGE_REGEX.search(range_header) + if not match: + raise HTTPException(400) + + start, end = match.group("start"), match.group("end") + + range = OpenRange(int(start), int(end) if end else None) + + 
return RangedFileResponse( + full_path, range, stat_result=stat_result, method=method + ) diff --git a/testbed/gradio-app__gradio/gradio/reload.py b/testbed/gradio-app__gradio/gradio/reload.py new file mode 100644 index 0000000000000000000000000000000000000000..c072c4c0e5f33efd76640dc1e1eb5589bbbf76a2 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/reload.py @@ -0,0 +1,104 @@ +""" + +Contains the functions that run when `gradio` is called from the command line. Specifically, allows + +$ gradio app.py, to run app.py in reload mode where any changes in the app.py file or Gradio library reloads the demo. +$ gradio app.py my_demo, to use variable names other than "demo" +""" +import inspect +import os +import re +import subprocess +import sys +import threading +from pathlib import Path + +import gradio +from gradio import utils + +reload_thread = threading.local() + + +def _setup_config(): + args = sys.argv[1:] + if len(args) == 0: + raise ValueError("No file specified.") + if len(args) == 1 or args[1].startswith("--"): + demo_name = "demo" + else: + demo_name = args[1] + if "." in demo_name: + demo_name = demo_name.split(".")[0] + print( + "\nWARNING: As of Gradio 3.41.0, the parameter after the file path must be the name of the Gradio demo, not the FastAPI app. In most cases, this just means you should remove '.app' after the name of your demo, e.g. 'demo.app' -> 'demo'." + ) + + original_path = args[0] + app_text = Path(original_path).read_text() + + patterns = [ + f"with gr\\.Blocks\\(\\) as {demo_name}", + f"{demo_name} = gr\\.Blocks", + f"{demo_name} = gr\\.Interface", + f"{demo_name} = gr\\.ChatInterface", + f"{demo_name} = gr\\.Series", + f"{demo_name} = gr\\.Paralles", + f"{demo_name} = gr\\.TabbedInterface", + ] + + if not any(re.search(p, app_text) for p in patterns): + print( + f"\nWarning: Cannot statically find a gradio demo called {demo_name}. " + "Reload work may fail." 
+ ) + + abs_original_path = utils.abspath(original_path) + path = os.path.normpath(original_path) + path = path.replace("/", ".") + path = path.replace("\\", ".") + filename = os.path.splitext(path)[0] + + gradio_folder = Path(inspect.getfile(gradio)).parent + + message = "Watching:" + message_change_count = 0 + + watching_dirs = [] + if str(gradio_folder).strip(): + watching_dirs.append(gradio_folder) + message += f" '{gradio_folder}'" + message_change_count += 1 + + abs_parent = abs_original_path.parent + if str(abs_parent).strip(): + watching_dirs.append(abs_parent) + if message_change_count == 1: + message += "," + message += f" '{abs_parent}'" + + print(message + "\n") + + # guaranty access to the module of an app + sys.path.insert(0, os.getcwd()) + return filename, abs_original_path, [str(s) for s in watching_dirs], demo_name + + +def main(): + # default execution pattern to start the server and watch changes + filename, path, watch_dirs, demo_name = _setup_config() + args = sys.argv[1:] + extra_args = args[1:] if len(args) == 1 or args[1].startswith("--") else args[2:] + popen = subprocess.Popen( + ["python", path] + extra_args, + env=dict( + os.environ, + GRADIO_WATCH_DIRS=",".join(watch_dirs), + GRADIO_WATCH_FILE=filename, + GRADIO_WATCH_DEMO_NAME=demo_name, + ), + ) + popen.wait() + + +if __name__ == "__main__": + main() diff --git a/testbed/gradio-app__gradio/gradio/route_utils.py b/testbed/gradio-app__gradio/gradio/route_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b79058820dc455524952f74d48fe14171f6955d9 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/route_utils.py @@ -0,0 +1,246 @@ +from __future__ import annotations + +import json +from typing import TYPE_CHECKING, Optional, Union + +import fastapi +from gradio_client.documentation import document, set_documentation_group + +from gradio import utils +from gradio.data_classes import PredictBody +from gradio.exceptions import Error +from gradio.helpers import 
EventData +from gradio.state_holder import SessionState + +if TYPE_CHECKING: + from gradio.routes import App + +set_documentation_group("routes") + + +class Obj: + """ + Using a class to convert dictionaries into objects. Used by the `Request` class. + Credit: https://www.geeksforgeeks.org/convert-nested-python-dictionary-to-object/ + """ + + def __init__(self, dict_): + self.__dict__.update(dict_) + for key, value in dict_.items(): + if isinstance(value, (dict, list)): + value = Obj(value) + setattr(self, key, value) + + def __getitem__(self, item): + return self.__dict__[item] + + def __setitem__(self, item, value): + self.__dict__[item] = value + + def __iter__(self): + for key, value in self.__dict__.items(): + if isinstance(value, Obj): + yield (key, dict(value)) + else: + yield (key, value) + + def __contains__(self, item) -> bool: + if item in self.__dict__: + return True + for value in self.__dict__.values(): + if isinstance(value, Obj) and item in value: + return True + return False + + def keys(self): + return self.__dict__.keys() + + def values(self): + return self.__dict__.values() + + def items(self): + return self.__dict__.items() + + def __str__(self) -> str: + return str(self.__dict__) + + def __repr__(self) -> str: + return str(self.__dict__) + + +@document() +class Request: + """ + A Gradio request object that can be used to access the request headers, cookies, + query parameters and other information about the request from within the prediction + function. The class is a thin wrapper around the fastapi.Request class. Attributes + of this class include: `headers`, `client`, `query_params`, and `path_params`. If + auth is enabled, the `username` attribute can be used to get the logged in user. 
+ Example: + import gradio as gr + def echo(text, request: gr.Request): + if request: + print("Request headers dictionary:", request.headers) + print("IP address:", request.client.host) + print("Query parameters:", dict(request.query_params)) + return text + io = gr.Interface(echo, "textbox", "textbox").launch() + Demos: request_ip_headers + """ + + def __init__( + self, + request: fastapi.Request | None = None, + username: str | None = None, + **kwargs, + ): + """ + Can be instantiated with either a fastapi.Request or by manually passing in + attributes (needed for websocket-based queueing). + Parameters: + request: A fastapi.Request + """ + self.request = request + self.username = username + self.kwargs: dict = kwargs + + def dict_to_obj(self, d): + if isinstance(d, dict): + return json.loads(json.dumps(d), object_hook=Obj) + else: + return d + + def __getattr__(self, name): + if self.request: + return self.dict_to_obj(getattr(self.request, name)) + else: + try: + obj = self.kwargs[name] + except KeyError as ke: + raise AttributeError( + f"'Request' object has no attribute '{name}'" + ) from ke + return self.dict_to_obj(obj) + + +class FnIndexInferError(Exception): + pass + + +def infer_fn_index(app: App, api_name: str, body: PredictBody) -> int: + if body.fn_index is None: + for i, fn in enumerate(app.get_blocks().dependencies): + if fn["api_name"] == api_name: + return i + + raise FnIndexInferError(f"Could not infer fn_index for api_name {api_name}.") + else: + return body.fn_index + + +def compile_gr_request( + app: App, + body: PredictBody, + fn_index_inferred: int, + username: Optional[str], + request: Optional[fastapi.Request], +): + # If this fn_index cancels jobs, then the only input we need is the + # current session hash + if app.get_blocks().dependencies[fn_index_inferred]["cancels"]: + body.data = [body.session_hash] + if body.request: + if body.batched: + gr_request = [Request(username=username, **req) for req in body.request] + else: + assert 
isinstance(body.request, dict) + gr_request = Request(username=username, **body.request) + else: + if request is None: + raise ValueError("request must be provided if body.request is None") + gr_request = Request(username=username, request=request) + + return gr_request + + +def restore_session_state(app: App, body: PredictBody): + fn_index = body.fn_index + session_hash = getattr(body, "session_hash", None) + if session_hash is not None: + session_state = app.state_holder[session_hash] + # The should_reset set keeps track of the fn_indices + # that have been cancelled. When a job is cancelled, + # the /reset route will mark the jobs as having been reset. + # That way if the cancel job finishes BEFORE the job being cancelled + # the job being cancelled will not overwrite the state of the iterator. + if fn_index in app.iterators_to_reset[session_hash]: + iterators = {} + app.iterators_to_reset[session_hash].remove(fn_index) + else: + iterators = app.iterators[session_hash] + else: + session_state = SessionState(app.get_blocks()) + iterators = {} + + return session_state, iterators + + +async def call_process_api( + app: App, + body: PredictBody, + gr_request: Union[Request, list[Request]], + fn_index_inferred, +): + session_state, iterators = restore_session_state(app=app, body=body) + + dependency = app.get_blocks().dependencies[fn_index_inferred] + + target = dependency["targets"][0] if len(dependency["targets"]) else None + event_data = EventData( + app.get_blocks().blocks.get(target) if target else None, + body.event_data, + ) + + event_id = getattr(body, "event_id", None) + + fn_index = body.fn_index + session_hash = getattr(body, "session_hash", None) + inputs = body.data + + batch_in_single_out = not body.batched and dependency["batch"] + if batch_in_single_out: + inputs = [inputs] + + try: + with utils.MatplotlibBackendMananger(): + output = await app.get_blocks().process_api( + fn_index=fn_index_inferred, + inputs=inputs, + request=gr_request, + 
state=session_state, + iterators=iterators, + session_hash=session_hash, + event_id=event_id, + event_data=event_data, + in_event_listener=True, + ) + iterator = output.pop("iterator", None) + if hasattr(body, "session_hash"): + app.iterators[body.session_hash][fn_index] = iterator + if isinstance(output, Error): + raise output + except BaseException: + iterator = iterators.get(fn_index, None) + if iterator is not None: # close off any streams that are still open + run_id = id(iterator) + pending_streams: dict[int, list] = ( + app.get_blocks().pending_streams[session_hash].get(run_id, {}) + ) + for stream in pending_streams.values(): + stream.append(None) + raise + + if batch_in_single_out: + output["data"] = output["data"][0] + + return output diff --git a/testbed/gradio-app__gradio/gradio/routes.py b/testbed/gradio-app__gradio/gradio/routes.py new file mode 100644 index 0000000000000000000000000000000000000000..ff6886a4e87a23367f23d420c40ed81b0144804e --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/routes.py @@ -0,0 +1,725 @@ +"""Implements a FastAPI server to run the gradio interface. 
Note that some types in this +module use the Optional/Union notation so that they work correctly with pydantic.""" + +from __future__ import annotations + +import asyncio +import sys + +if sys.version_info >= (3, 9): + from importlib.resources import files +else: + from importlib_resources import files +import inspect +import mimetypes +import os +import posixpath +import secrets +import tempfile +import threading +import time +import traceback +from asyncio import TimeoutError as AsyncTimeOutError +from collections import defaultdict +from pathlib import Path +from typing import Any, Dict, List, Optional, Type + +import fastapi +import httpx +import markupsafe +import orjson +from fastapi import Depends, FastAPI, File, HTTPException, UploadFile, WebSocket, status +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import ( + FileResponse, + HTMLResponse, + JSONResponse, + PlainTextResponse, +) +from fastapi.security import OAuth2PasswordRequestForm +from fastapi.templating import Jinja2Templates +from gradio_client.documentation import document, set_documentation_group +from jinja2.exceptions import TemplateNotFound +from starlette.background import BackgroundTask +from starlette.responses import RedirectResponse, StreamingResponse +from starlette.websockets import WebSocketState + +import gradio +import gradio.ranged_response as ranged_response +from gradio import route_utils, utils, wasm_utils +from gradio.context import Context +from gradio.data_classes import PredictBody, ResetBody +from gradio.deprecation import warn_deprecation +from gradio.exceptions import Error +from gradio.oauth import attach_oauth +from gradio.queueing import Estimation, Event +from gradio.route_utils import Request # noqa: F401 +from gradio.state_holder import StateHolder +from gradio.utils import ( + cancel_tasks, + get_package_version, + run_coro_in_background, + set_task_name, +) + +mimetypes.init() + +STATIC_TEMPLATE_LIB = 
files("gradio").joinpath("templates").as_posix() # type: ignore +STATIC_PATH_LIB = files("gradio").joinpath("templates", "frontend", "static").as_posix() # type: ignore +BUILD_PATH_LIB = files("gradio").joinpath("templates", "frontend", "assets").as_posix() # type: ignore +VERSION = get_package_version() + + +class ORJSONResponse(JSONResponse): + media_type = "application/json" + + @staticmethod + def _render(content: Any) -> bytes: + return orjson.dumps( + content, + option=orjson.OPT_SERIALIZE_NUMPY | orjson.OPT_PASSTHROUGH_DATETIME, + default=str, + ) + + def render(self, content: Any) -> bytes: + return ORJSONResponse._render(content) + + @staticmethod + def _render_str(content: Any) -> str: + return ORJSONResponse._render(content).decode("utf-8") + + +def toorjson(value): + return markupsafe.Markup( + ORJSONResponse._render_str(value) + .replace("<", "\\u003c") + .replace(">", "\\u003e") + .replace("&", "\\u0026") + .replace("'", "\\u0027") + ) + + +templates = Jinja2Templates(directory=STATIC_TEMPLATE_LIB) +templates.env.filters["toorjson"] = toorjson + +client = httpx.AsyncClient() + + +class App(FastAPI): + """ + FastAPI App Wrapper + """ + + def __init__(self, **kwargs): + self.tokens = {} + self.auth = None + self.blocks: gradio.Blocks | None = None + self.state_holder = StateHolder() + self.iterators = defaultdict(dict) + self.iterators_to_reset = defaultdict(set) + self.lock = utils.safe_get_lock() + self.cookie_id = secrets.token_urlsafe(32) + self.queue_token = secrets.token_urlsafe(32) + self.startup_events_triggered = False + self.uploaded_file_dir = os.environ.get("GRADIO_TEMP_DIR") or str( + Path(tempfile.gettempdir()) / "gradio" + ) + self.change_event: None | threading.Event = None + # Allow user to manually set `docs_url` and `redoc_url` + # when instantiating an App; when they're not set, disable docs and redoc. 
+ kwargs.setdefault("docs_url", None) + kwargs.setdefault("redoc_url", None) + super().__init__(**kwargs) + + def configure_app(self, blocks: gradio.Blocks) -> None: + auth = blocks.auth + if auth is not None: + if not callable(auth): + self.auth = {account[0]: account[1] for account in auth} + else: + self.auth = auth + else: + self.auth = None + + self.blocks = blocks + self.cwd = os.getcwd() + self.favicon_path = blocks.favicon_path + self.tokens = {} + self.root_path = blocks.root_path + self.state_holder.set_blocks(blocks) + + def get_blocks(self) -> gradio.Blocks: + if self.blocks is None: + raise ValueError("No Blocks has been configured for this app.") + return self.blocks + + def build_proxy_request(self, url_path): + url = httpx.URL(url_path) + assert self.blocks + # Don't proxy a URL unless it's a URL specifically loaded by the user using + # gr.load() to prevent SSRF or harvesting of HF tokens by malicious Spaces. + is_safe_url = any( + url.host == httpx.URL(root).host for root in self.blocks.root_urls + ) + if not is_safe_url: + raise PermissionError("This URL cannot be proxied.") + is_hf_url = url.host.endswith(".hf.space") + headers = {} + if Context.hf_token is not None and is_hf_url: + headers["Authorization"] = f"Bearer {Context.hf_token}" + rp_req = client.build_request("GET", url, headers=headers) + return rp_req + + @staticmethod + def create_app( + blocks: gradio.Blocks, app_kwargs: Dict[str, Any] | None = None + ) -> App: + app_kwargs = app_kwargs or {} + app_kwargs.setdefault("default_response_class", ORJSONResponse) + app = App(**app_kwargs) + app.configure_app(blocks) + + if not wasm_utils.IS_WASM: + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_methods=["*"], + allow_headers=["*"], + ) + + @app.get("/user") + @app.get("/user/") + def get_current_user(request: fastapi.Request) -> Optional[str]: + token = request.cookies.get( + f"access-token-{app.cookie_id}" + ) or 
request.cookies.get(f"access-token-unsecure-{app.cookie_id}") + return app.tokens.get(token) + + @app.get("/login_check") + @app.get("/login_check/") + def login_check(user: str = Depends(get_current_user)): + if app.auth is None or user is not None: + return + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Not authenticated" + ) + + async def ws_login_check(websocket: WebSocket) -> Optional[str]: + token = websocket.cookies.get( + f"access-token-{app.cookie_id}" + ) or websocket.cookies.get(f"access-token-unsecure-{app.cookie_id}") + return token # token is returned to authenticate the websocket connection in the endpoint handler. + + @app.get("/token") + @app.get("/token/") + def get_token(request: fastapi.Request) -> dict: + token = request.cookies.get(f"access-token-{app.cookie_id}") + return {"token": token, "user": app.tokens.get(token)} + + @app.get("/app_id") + @app.get("/app_id/") + def app_id(request: fastapi.Request) -> dict: + return {"app_id": app.get_blocks().app_id} + + async def send_ping_periodically(websocket: WebSocket): + while True: + await websocket.send_text("PING") + await asyncio.sleep(1) + + async def listen_for_changes(websocket: WebSocket): + assert app.change_event + while True: + if app.change_event.is_set(): + await websocket.send_text("CHANGE") + app.change_event.clear() + await asyncio.sleep(0.1) # Short sleep to not make this a tight loop + + @app.websocket("/dev/reload") + async def notify_changes(websocket: WebSocket): + await websocket.accept() + + ping = asyncio.create_task(send_ping_periodically(websocket)) + notify = asyncio.create_task(listen_for_changes(websocket)) + tasks = {ping, notify} + ping.add_done_callback(tasks.remove) + notify.add_done_callback(tasks.remove) + done, pending = await asyncio.wait( + [ping, notify], + return_when=asyncio.FIRST_COMPLETED, + ) + + for task in pending: + task.cancel() + + if any(isinstance(task.exception(), Exception) for task in done): + await 
websocket.close() + + @app.post("/login") + @app.post("/login/") + def login(form_data: OAuth2PasswordRequestForm = Depends()): + username, password = form_data.username.strip(), form_data.password + if app.auth is None: + return RedirectResponse(url="/", status_code=status.HTTP_302_FOUND) + if ( + not callable(app.auth) + and username in app.auth + and app.auth[username] == password + ) or (callable(app.auth) and app.auth.__call__(username, password)): + token = secrets.token_urlsafe(16) + app.tokens[token] = username + response = JSONResponse(content={"success": True}) + response.set_cookie( + key=f"access-token-{app.cookie_id}", + value=token, + httponly=True, + samesite="none", + secure=True, + ) + response.set_cookie( + key=f"access-token-unsecure-{app.cookie_id}", + value=token, + httponly=True, + ) + return response + else: + raise HTTPException(status_code=400, detail="Incorrect credentials.") + + ############### + # OAuth Routes + ############### + + # Define OAuth routes if the app expects it (i.e. a LoginButton is defined). + # It allows users to "Sign in with HuggingFace". 
+ if app.blocks is not None and app.blocks.expects_oauth: + attach_oauth(app) + + ############### + # Main Routes + ############### + + @app.head("/", response_class=HTMLResponse) + @app.get("/", response_class=HTMLResponse) + def main(request: fastapi.Request, user: str = Depends(get_current_user)): + mimetypes.add_type("application/javascript", ".js") + blocks = app.get_blocks() + root_path = request.scope.get("root_path", "") + + if app.auth is None or user is not None: + config = app.get_blocks().config + config["root"] = root_path + else: + config = { + "auth_required": True, + "auth_message": blocks.auth_message, + "space_id": app.get_blocks().space_id, + "root": root_path, + } + + try: + template = ( + "frontend/share.html" if blocks.share else "frontend/index.html" + ) + return templates.TemplateResponse( + template, + {"request": request, "config": config}, + ) + except TemplateNotFound as err: + if blocks.share: + raise ValueError( + "Did you install Gradio from source files? Share mode only " + "works when Gradio is installed through the pip package." + ) from err + else: + raise ValueError( + "Did you install Gradio from source files? 
You need to build " + "the frontend by running /scripts/build_frontend.sh" + ) from err + + @app.get("/info/", dependencies=[Depends(login_check)]) + @app.get("/info", dependencies=[Depends(login_check)]) + def api_info(serialize: bool = True): + config = app.get_blocks().config + return gradio.blocks.get_api_info(config, serialize) # type: ignore + + @app.get("/config/", dependencies=[Depends(login_check)]) + @app.get("/config", dependencies=[Depends(login_check)]) + def get_config(request: fastapi.Request): + root_path = request.scope.get("root_path", "") + config = app.get_blocks().config + config["root"] = root_path + return config + + @app.get("/static/{path:path}") + def static_resource(path: str): + static_file = safe_join(STATIC_PATH_LIB, path) + return FileResponse(static_file) + + @app.get("/assets/{path:path}") + def build_resource(path: str): + build_file = safe_join(BUILD_PATH_LIB, path) + return FileResponse(build_file) + + @app.get("/favicon.ico") + async def favicon(): + blocks = app.get_blocks() + if blocks.favicon_path is None: + return static_resource("img/logo.svg") + else: + return FileResponse(blocks.favicon_path) + + @app.head("/proxy={url_path:path}", dependencies=[Depends(login_check)]) + @app.get("/proxy={url_path:path}", dependencies=[Depends(login_check)]) + async def reverse_proxy(url_path: str): + # Adapted from: https://github.com/tiangolo/fastapi/issues/1788 + try: + rp_req = app.build_proxy_request(url_path) + except PermissionError as err: + raise HTTPException(status_code=400, detail=str(err)) from err + rp_resp = await client.send(rp_req, stream=True) + return StreamingResponse( + rp_resp.aiter_raw(), + status_code=rp_resp.status_code, + headers=rp_resp.headers, # type: ignore + background=BackgroundTask(rp_resp.aclose), + ) + + @app.head("/file={path_or_url:path}", dependencies=[Depends(login_check)]) + @app.get("/file={path_or_url:path}", dependencies=[Depends(login_check)]) + async def file(path_or_url: str, request: 
fastapi.Request): + blocks = app.get_blocks() + if utils.validate_url(path_or_url): + return RedirectResponse( + url=path_or_url, status_code=status.HTTP_302_FOUND + ) + + abs_path = utils.abspath(path_or_url) + + in_blocklist = any( + utils.is_in_or_equal(abs_path, blocked_path) + for blocked_path in blocks.blocked_paths + ) + is_dotfile = any(part.startswith(".") for part in abs_path.parts) + is_dir = abs_path.is_dir() + + if in_blocklist or is_dotfile or is_dir: + raise HTTPException(403, f"File not allowed: {path_or_url}.") + + in_app_dir = utils.is_in_or_equal(abs_path, app.cwd) + created_by_app = str(abs_path) in set().union(*blocks.temp_file_sets) + in_allowlist = any( + utils.is_in_or_equal(abs_path, allowed_path) + for allowed_path in blocks.allowed_paths + ) + was_uploaded = utils.is_in_or_equal(abs_path, app.uploaded_file_dir) + + if not (in_app_dir or created_by_app or in_allowlist or was_uploaded): + raise HTTPException(403, f"File not allowed: {path_or_url}.") + + if not abs_path.exists(): + raise HTTPException(404, f"File not found: {path_or_url}.") + + range_val = request.headers.get("Range", "").strip() + if range_val.startswith("bytes=") and "-" in range_val: + range_val = range_val[6:] + start, end = range_val.split("-") + if start.isnumeric() and end.isnumeric(): + start = int(start) + end = int(end) + response = ranged_response.RangedFileResponse( + abs_path, + ranged_response.OpenRange(start, end), + dict(request.headers), + stat_result=os.stat(abs_path), + ) + return response + return FileResponse(abs_path, headers={"Accept-Ranges": "bytes"}) + + @app.get( + "/stream/{session_hash}/{run}/{component_id}", + dependencies=[Depends(login_check)], + ) + async def stream( + session_hash: str, run: int, component_id: int, request: fastapi.Request + ): + stream: list = ( + app.get_blocks() + .pending_streams[session_hash] + .get(run, {}) + .get(component_id, None) + ) + if stream is None: + raise HTTPException(404, "Stream not found.") + + def 
stream_wrapper(): + check_stream_rate = 0.01 + max_wait_time = 120 # maximum wait between yields - assume generator thread has crashed otherwise. + wait_time = 0 + while True: + if len(stream) == 0: + if wait_time > max_wait_time: + return + wait_time += check_stream_rate + time.sleep(check_stream_rate) + continue + wait_time = 0 + next_stream = stream.pop(0) + if next_stream is None: + return + yield next_stream + + return StreamingResponse(stream_wrapper()) + + @app.get("/file/{path:path}", dependencies=[Depends(login_check)]) + async def file_deprecated(path: str, request: fastapi.Request): + return await file(path, request) + + @app.post("/reset/") + @app.post("/reset") + async def reset_iterator(body: ResetBody): + if body.session_hash not in app.iterators: + return {"success": False} + async with app.lock: + app.iterators[body.session_hash][body.fn_index] = None + app.iterators_to_reset[body.session_hash].add(body.fn_index) + return {"success": True} + + # had to use '/run' endpoint for Colab compatibility, '/api' supported for backwards compatibility + @app.post("/run/{api_name}", dependencies=[Depends(login_check)]) + @app.post("/run/{api_name}/", dependencies=[Depends(login_check)]) + @app.post("/api/{api_name}", dependencies=[Depends(login_check)]) + @app.post("/api/{api_name}/", dependencies=[Depends(login_check)]) + async def predict( + api_name: str, + body: PredictBody, + request: fastapi.Request, + username: str = Depends(get_current_user), + ): + fn_index_inferred = route_utils.infer_fn_index( + app=app, api_name=api_name, body=body + ) + + if not app.get_blocks().api_open and app.get_blocks().queue_enabled_for_fn( + fn_index_inferred + ): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + ) + + gr_request = route_utils.compile_gr_request( + app, + body, + fn_index_inferred=fn_index_inferred, + username=username, + request=request, + ) + + try: + output = await route_utils.call_process_api( + app=app, + body=body, + 
gr_request=gr_request, + fn_index_inferred=fn_index_inferred, + ) + except BaseException as error: + show_error = app.get_blocks().show_error or isinstance(error, Error) + traceback.print_exc() + return JSONResponse( + content={"error": str(error) if show_error else None}, + status_code=500, + ) + return output + + @app.websocket("/queue/join") + async def join_queue( + websocket: WebSocket, + token: Optional[str] = Depends(ws_login_check), + ): + blocks = app.get_blocks() + if app.auth is not None and token is None: + await websocket.close(code=status.WS_1008_POLICY_VIOLATION) + return + if blocks._queue.server_app is None: + blocks._queue.set_server_app(app) + await websocket.accept() + # In order to cancel jobs, we need the session_hash and fn_index + # to create a unique id for each job + try: + await asyncio.wait_for( + websocket.send_json({"msg": "send_hash"}), timeout=5 + ) + except AsyncTimeOutError: + return + + try: + session_info = await asyncio.wait_for( + websocket.receive_json(), timeout=5 + ) + except AsyncTimeOutError: + return + + event = Event( + websocket, session_info["session_hash"], session_info["fn_index"] + ) + # set the username into Event to allow using the same username for call_prediction + event.username = app.tokens.get(token) + event.session_hash = session_info["session_hash"] + + # Continuous events are not put in the queue so that they do not + # occupy the queue's resource as they are expected to run forever + if blocks.dependencies[event.fn_index].get("every", 0): + await cancel_tasks({f"{event.session_hash}_{event.fn_index}"}) + await blocks._queue.reset_iterators(event.session_hash, event.fn_index) + blocks._queue.continuous_tasks.append(event) + task = run_coro_in_background( + blocks._queue.process_events, [event], False + ) + set_task_name(task, event.session_hash, event.fn_index, batch=False) + else: + rank = blocks._queue.push(event) + + if rank is None: + await blocks._queue.send_message(event, {"msg": "queue_full"}) + 
def safe_join(directory: str, path: str) -> str:
    """Safely join ``path`` onto a base ``directory`` to avoid escaping the
    base directory. Borrowed from: werkzeug.security.safe_join

    Raises HTTPException 400 for an empty path, 403 for any path that is
    absolute, traverses upward, contains an OS-specific separator, or names
    a directory, and 404 when the resolved file does not exist.
    """
    # Path separators other than "/" that this OS recognizes (e.g. "\\" on
    # Windows); any occurrence in the requested path is rejected below.
    _os_alt_seps: List[str] = [
        sep for sep in [os.path.sep, os.path.altsep] if sep is not None and sep != "/"
    ]

    if path == "":
        raise HTTPException(400)

    # Collapse "a/./b", redundant slashes, and interior ".." segments before
    # checking, so the tests below see the canonical POSIX form.
    filename = posixpath.normpath(path)
    fullpath = os.path.join(directory, filename)
    if (
        any(sep in filename for sep in _os_alt_seps)
        or os.path.isabs(filename)
        or filename == ".."
        or filename.startswith("../")
        # Only files are served, never directories.
        or os.path.isdir(fullpath)
    ):
        raise HTTPException(403)

    if not os.path.exists(fullpath):
        raise HTTPException(404, "File not found")

    return fullpath


def get_types(cls_set: List[Type]):
    """Extract, for each component class in ``cls_set``, the value type named
    in its docstring (the text inside "value (...)") plus a short description.

    Returns:
        A ``(docset, types)`` pair of parallel lists of strings.
    """
    docset = []
    types = []
    for cls in cls_set:
        doc = inspect.getdoc(cls) or ""
        doc_lines = doc.split("\n")
        for line in doc_lines:
            if "value (" in line:
                # e.g. a doc line "value (str): the text" yields "str".
                types.append(line.split("value (")[1].split(")")[0])
        # NOTE(review): assumes the docstring has at least two lines and that
        # the second line carries a ":"-separated description — confirm
        # against the component docstring convention.
        docset.append(doc_lines[1].split(":")[-1])
    return docset, types
+ """ + blocks.dev_mode = False + blocks.config = blocks.get_config_file() + blocks.validate_queue_settings() + gradio_app = App.create_app(blocks, app_kwargs=app_kwargs) + + if gradio_api_url is not None: + warn_deprecation("gradio_api_url is deprecated and has not effect.") + + @app.on_event("startup") + async def start_queue(): + if gradio_app.get_blocks().enable_queue: + gradio_app.get_blocks().startup_events() + + app.mount(path, gradio_app) + return app diff --git a/testbed/gradio-app__gradio/gradio/state_holder.py b/testbed/gradio-app__gradio/gradio/state_holder.py new file mode 100644 index 0000000000000000000000000000000000000000..012457f75c298c541f0bb3e23155133c7e65eb36 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/state_holder.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +import threading +from collections import OrderedDict +from copy import deepcopy +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from gradio.blocks import Blocks + + +class StateHolder: + def __init__(self): + self.capacity = 10000 + self.session_data = OrderedDict() + self.lock = threading.Lock() + + def set_blocks(self, blocks: Blocks): + self.blocks = blocks + self.capacity = blocks.state_session_capacity + + def __getitem__(self, session_id: int) -> SessionState: + if session_id not in self.session_data: + self.session_data[session_id] = SessionState(self.blocks) + self.update(session_id) + return self.session_data[session_id] + + def __contains__(self, session_id: int): + return session_id in self.session_data + + def update(self, session_id: int): + with self.lock: + if session_id in self.session_data: + self.session_data.move_to_end(session_id) + if len(self.session_data) > self.capacity: + self.session_data.popitem(last=False) + + +class SessionState: + def __init__(self, blocks: Blocks): + self.blocks = blocks + self._data = {} + + def __getitem__(self, key: int) -> Any: + if key not in self._data: + block = self.blocks.blocks[key] + if 
getattr(block, "stateful", False): + self._data[key] = deepcopy(getattr(block, "value", None)) + else: + self._data[key] = None + return self._data[key] + + def __setitem__(self, key: int, value: Any): + self._data[key] = value + + def __contains__(self, key: int): + return key in self._data diff --git a/testbed/gradio-app__gradio/gradio/strings.py b/testbed/gradio-app__gradio/gradio/strings.py new file mode 100644 index 0000000000000000000000000000000000000000..d85bc052969438e1e05dbf3abd9c75c8effc7d03 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/strings.py @@ -0,0 +1,48 @@ +import os +import threading +from typing import Dict + +import requests + +from gradio import wasm_utils + +MESSAGING_API_ENDPOINT = "https://api.gradio.app/gradio-messaging/en" + +en = { + "RUNNING_LOCALLY": "Running on local URL: {}", + "RUNNING_LOCALLY_SEPARATED": "Running on local URL: {}://{}:{}", + "SHARE_LINK_DISPLAY": "Running on public URL: {}", + "COULD_NOT_GET_SHARE_LINK": "\nCould not create share link. Please check your internet connection or our status page: https://status.gradio.app.", + "COULD_NOT_GET_SHARE_LINK_MISSING_FILE": "\nCould not create share link. Missing file: {}. \n\nPlease check your internet connection. This can happen if your antivirus software blocks the download of this file. You can install manually by following these steps: \n\n1. Download this file: {}\n2. Rename the downloaded file to: {}\n3. Move the file to this location: {}", + "COLAB_NO_LOCAL": "Cannot display local interface on google colab, public link created.", + "PUBLIC_SHARE_TRUE": "\nTo create a public link, set `share=True` in `launch()`.", + "MODEL_PUBLICLY_AVAILABLE_URL": "Model available publicly at: {} (may take up to a minute for link to be usable)", + "GENERATING_PUBLIC_LINK": "Generating public link (may take a few seconds...):", + "BETA_INVITE": "\nThanks for being a Gradio user! 
If you have questions or feedback, please join our Discord server and chat with us: https://discord.gg/feTf9x3ZSB", + "COLAB_DEBUG_TRUE": "Colab notebook detected. This cell will run indefinitely so that you can see errors and logs. " + "To turn off, set debug=False in launch().", + "COLAB_DEBUG_FALSE": "Colab notebook detected. To show errors in colab notebook, set debug=True in launch()", + "COLAB_WARNING": "Note: opening Chrome Inspector may crash demo inside Colab notebooks.", + "SHARE_LINK_MESSAGE": "\nThis share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)", + "INLINE_DISPLAY_BELOW": "Interface loading below...", + "TIPS": [ + "You can add authentication to your app with the `auth=` kwarg in the `launch()` command; for example: `gr.Interface(...).launch(auth=('username', 'password'))`", + "Let users specify why they flagged input with the `flagging_options=` kwarg; for example: `gr.Interface(..., flagging_options=['too slow', 'incorrect output', 'other'])`", + "You can show or hide the button for flagging with the `allow_flagging=` kwarg; for example: gr.Interface(..., allow_flagging=False)", + "The inputs and outputs flagged by the users are stored in the flagging directory, specified by the flagging_dir= kwarg. You can view this data through the interface by setting the examples= kwarg to the flagging directory; for example gr.Interface(..., examples='flagged')", + "You can add a title and description to your interface using the `title=` and `description=` kwargs. The `article=` kwarg can be used to add a description under the interface; for example gr.Interface(..., title='My app', description='Lorem ipsum'). 
Try using Markdown!", + "For a classification or regression model, set `interpretation='default'` to see why the model made a prediction.", + ], +} + + +def get_updated_messaging(en: Dict): + try: + updated_messaging = requests.get(MESSAGING_API_ENDPOINT, timeout=3).json() + en.update(updated_messaging) + except Exception: # Use default messaging + pass + + +if os.getenv("GRADIO_ANALYTICS_ENABLED", "True") == "True" and not wasm_utils.IS_WASM: + threading.Thread(target=get_updated_messaging, args=(en,)).start() diff --git a/testbed/gradio-app__gradio/gradio/templates.py b/testbed/gradio-app__gradio/gradio/templates.py new file mode 100644 index 0000000000000000000000000000000000000000..42ebb1a2d7b01acb18ae9a403d12494c1ae20c91 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/templates.py @@ -0,0 +1,586 @@ +from __future__ import annotations + +from typing import Any, Callable, Literal + +import numpy as np +from PIL.Image import Image + +from gradio import components + + +class TextArea(components.Textbox): + """ + Sets: lines=7 + """ + + is_template = True + + def __init__( + self, + value: str | Callable | None = "", + *, + lines: int = 7, + max_lines: int = 20, + placeholder: str | None = None, + label: str | None = None, + show_label: bool = True, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + **kwargs, + ): + super().__init__( + value=value, + lines=lines, + max_lines=max_lines, + placeholder=placeholder, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + elem_id=elem_id, + **kwargs, + ) + + +class Webcam(components.Image): + """ + Sets: source="webcam", interactive=True + """ + + is_template = True + + def __init__( + self, + value: str | Image | np.ndarray | None = None, + *, + shape: tuple[int, int] | None = None, + image_mode: Literal["RGB", "L"] = "RGB", + invert_colors: bool = False, + source: Literal["webcam"] = "webcam", + tool: Literal["editor", "select", "sketch", 
"color-sketch"] | None = None, + type: Literal["numpy", "pil", "filepath"] = "numpy", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = True, + visible: bool = True, + streaming: bool = False, + elem_id: str | None = None, + mirror_webcam: bool = True, + brush_radius: float | None = None, + brush_color: str = "#000000", + **kwargs, + ): + super().__init__( + value=value, + shape=shape, + image_mode=image_mode, + invert_colors=invert_colors, + source=source, + tool=tool, + type=type, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + streaming=streaming, + elem_id=elem_id, + mirror_webcam=mirror_webcam, + brush_radius=brush_radius, + brush_color=brush_color, + **kwargs, + ) + + +class Sketchpad(components.Image): + """ + Sets: image_mode="L", source="canvas", shape=(28, 28), invert_colors=True, interactive=True + """ + + is_template = True + + def __init__( + self, + value: str | Image | np.ndarray | None = None, + *, + shape: tuple[int, int] = (28, 28), + image_mode: Literal["L"] = "L", + invert_colors: bool = True, + source: Literal["canvas"] = "canvas", + tool: Literal["editor", "select", "sketch", "color-sketch"] | None = None, + type: Literal["numpy", "pil", "filepath"] = "numpy", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = True, + visible: bool = True, + streaming: bool = False, + elem_id: str | None = None, + mirror_webcam: bool = True, + brush_radius: float | None = None, + brush_color: str = "#000000", + **kwargs, + ): + super().__init__( + value=value, + shape=shape, + image_mode=image_mode, + invert_colors=invert_colors, + source=source, + tool=tool, + type=type, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + streaming=streaming, + elem_id=elem_id, + mirror_webcam=mirror_webcam, + brush_radius=brush_radius, + brush_color=brush_color, + **kwargs, + ) + + +class Paint(components.Image): + """ + Sets: 
source="canvas", tool="color-sketch", interactive=True + """ + + is_template = True + + def __init__( + self, + value: str | Image | np.ndarray | None = None, + *, + shape: tuple[int, int] | None = None, + image_mode: Literal["RGB"] = "RGB", + invert_colors: bool = False, + source: Literal["canvas"] = "canvas", + tool: Literal["color-sketch"] = "color-sketch", + type: Literal["numpy", "pil", "filepath"] = "numpy", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = True, + visible: bool = True, + streaming: bool = False, + elem_id: str | None = None, + mirror_webcam: bool = True, + brush_radius: float | None = None, + brush_color: str = "#000000", + **kwargs, + ): + super().__init__( + value=value, + shape=shape, + image_mode=image_mode, + invert_colors=invert_colors, + source=source, + tool=tool, + type=type, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + streaming=streaming, + elem_id=elem_id, + mirror_webcam=mirror_webcam, + brush_radius=brush_radius, + brush_color=brush_color, + **kwargs, + ) + + +class ImageMask(components.Image): + """ + Sets: source="upload", tool="sketch", interactive=True + """ + + is_template = True + + def __init__( + self, + value: str | Image | np.ndarray | None = None, + *, + shape: tuple[int, int] | None = None, + image_mode: Literal["RGB", "L"] = "RGB", + invert_colors: bool = False, + source: Literal["upload"] = "upload", + tool: Literal["sketch"] = "sketch", + type: Literal["numpy", "pil", "filepath"] = "numpy", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = True, + visible: bool = True, + streaming: bool = False, + elem_id: str | None = None, + mirror_webcam: bool = True, + brush_radius: float | None = None, + brush_color: str = "#000000", + **kwargs, + ): + super().__init__( + value=value, + shape=shape, + image_mode=image_mode, + invert_colors=invert_colors, + source=source, + tool=tool, + type=type, + label=label, + 
show_label=show_label, + interactive=interactive, + visible=visible, + streaming=streaming, + elem_id=elem_id, + mirror_webcam=mirror_webcam, + brush_radius=brush_radius, + brush_color=brush_color, + **kwargs, + ) + + +class ImagePaint(components.Image): + """ + Sets: source="upload", tool="color-sketch", interactive=True + """ + + is_template = True + + def __init__( + self, + value: str | Image | np.ndarray | None = None, + *, + shape: tuple[int, int] | None = None, + image_mode: Literal["RGB", "L"] = "RGB", + invert_colors: bool = False, + source: Literal["upload"] = "upload", + tool: Literal["color-sketch"] = "color-sketch", + type: Literal["numpy", "pil", "filepath"] = "numpy", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = True, + visible: bool = True, + streaming: bool = False, + elem_id: str | None = None, + mirror_webcam: bool = True, + brush_radius: float | None = None, + brush_color: str = "#000000", + **kwargs, + ): + super().__init__( + value=value, + shape=shape, + image_mode=image_mode, + invert_colors=invert_colors, + source=source, + tool=tool, + type=type, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + streaming=streaming, + elem_id=elem_id, + mirror_webcam=mirror_webcam, + brush_radius=brush_radius, + brush_color=brush_color, + **kwargs, + ) + + +class Pil(components.Image): + """ + Sets: type="pil" + """ + + is_template = True + + def __init__( + self, + value: str | Image | np.ndarray | None = None, + *, + shape: tuple[int, int] | None = None, + image_mode: Literal["RGB", "L"] = "RGB", + invert_colors: bool = False, + source: Literal["upload", "webcam", "canvas"] = "upload", + tool: Literal["editor", "select", "sketch", "color-sketch"] | None = None, + type: Literal["pil"] = "pil", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = None, + visible: bool = True, + streaming: bool = False, + elem_id: str | None = None, + mirror_webcam: 
bool = True, + brush_radius: float | None = None, + brush_color: str = "#000000", + **kwargs, + ): + super().__init__( + value=value, + shape=shape, + image_mode=image_mode, + invert_colors=invert_colors, + source=source, + tool=tool, + type=type, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + streaming=streaming, + elem_id=elem_id, + mirror_webcam=mirror_webcam, + brush_radius=brush_radius, + brush_color=brush_color, + **kwargs, + ) + + +class PlayableVideo(components.Video): + """ + Sets: format="mp4" + """ + + is_template = True + + def __init__( + self, + value: str | Callable | None = None, + *, + format: Literal["mp4"] | None = "mp4", + source: Literal["upload", "webcam"] = "upload", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + mirror_webcam: bool = True, + include_audio: bool | None = None, + **kwargs, + ): + super().__init__( + value=value, + format=format, + source=source, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + elem_id=elem_id, + mirror_webcam=mirror_webcam, + include_audio=include_audio, + **kwargs, + ) + + +class Microphone(components.Audio): + """ + Sets: source="microphone" + """ + + is_template = True + + def __init__( + self, + value: str | tuple[int, np.ndarray] | Callable | None = None, + *, + source: Literal["microphone"] = "microphone", + type: Literal["numpy", "filepath"] = "numpy", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = None, + visible: bool = True, + streaming: bool = False, + elem_id: str | None = None, + **kwargs, + ): + super().__init__( + value=value, + source=source, + type=type, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + streaming=streaming, + elem_id=elem_id, + **kwargs, + ) + + +class Files(components.File): + """ + Sets: file_count="multiple" + """ + + 
is_template = True + + def __init__( + self, + value: str | list[str] | Callable | None = None, + *, + file_count: Literal["multiple"] = "multiple", + type: Literal["file", "binary"] = "file", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + **kwargs, + ): + super().__init__( + value=value, + file_count=file_count, + type=type, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + elem_id=elem_id, + **kwargs, + ) + + +class Numpy(components.Dataframe): + """ + Sets: type="numpy" + """ + + is_template = True + + def __init__( + self, + value: list[list[Any]] | Callable | None = None, + *, + headers: list[str] | None = None, + row_count: int | tuple[int, str] = (1, "dynamic"), + col_count: int | tuple[int, str] | None = None, + datatype: str | list[str] = "str", + type: Literal["numpy"] = "numpy", + max_rows: int | None = 20, + max_cols: int | None = None, + overflow_row_behaviour: Literal["paginate", "show_ends"] = "paginate", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + wrap: bool = False, + **kwargs, + ): + super().__init__( + value=value, + headers=headers, + row_count=row_count, + col_count=col_count, + datatype=datatype, + type=type, + max_rows=max_rows, + max_cols=max_cols, + overflow_row_behaviour=overflow_row_behaviour, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + elem_id=elem_id, + wrap=wrap, + **kwargs, + ) + + +class Matrix(components.Dataframe): + """ + Sets: type="array" + """ + + is_template = True + + def __init__( + self, + value: list[list[Any]] | Callable | None = None, + *, + headers: list[str] | None = None, + row_count: int | tuple[int, str] = (1, "dynamic"), + col_count: int | tuple[int, str] | None = None, + datatype: str | list[str] = "str", + type: Literal["array"] = 
"array", + max_rows: int | None = 20, + max_cols: int | None = None, + overflow_row_behaviour: Literal["paginate", "show_ends"] = "paginate", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + wrap: bool = False, + **kwargs, + ): + super().__init__( + value=value, + headers=headers, + row_count=row_count, + col_count=col_count, + datatype=datatype, + type=type, + max_rows=max_rows, + max_cols=max_cols, + overflow_row_behaviour=overflow_row_behaviour, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + elem_id=elem_id, + wrap=wrap, + **kwargs, + ) + + +class List(components.Dataframe): + """ + Sets: type="array", col_count=1 + """ + + is_template = True + + def __init__( + self, + value: list[list[Any]] | Callable | None = None, + *, + headers: list[str] | None = None, + row_count: int | tuple[int, str] = (1, "dynamic"), + col_count: Literal[1] = 1, + datatype: str | list[str] = "str", + type: Literal["array"] = "array", + max_rows: int | None = 20, + max_cols: int | None = None, + overflow_row_behaviour: Literal["paginate", "show_ends"] = "paginate", + label: str | None = None, + show_label: bool = True, + interactive: bool | None = None, + visible: bool = True, + elem_id: str | None = None, + wrap: bool = False, + **kwargs, + ): + super().__init__( + value=value, + headers=headers, + row_count=row_count, + col_count=col_count, + datatype=datatype, + type=type, + max_rows=max_rows, + max_cols=max_cols, + overflow_row_behaviour=overflow_row_behaviour, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + elem_id=elem_id, + wrap=wrap, + **kwargs, + ) + + +Mic = Microphone diff --git a/testbed/gradio-app__gradio/gradio/test_data/__init__.py b/testbed/gradio-app__gradio/gradio/test_data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 
diff --git a/testbed/gradio-app__gradio/gradio/test_data/blocks_configs.py b/testbed/gradio-app__gradio/gradio/test_data/blocks_configs.py new file mode 100644 index 0000000000000000000000000000000000000000..e250f3b132731e0bf202af8769ae51806e687ea2 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/test_data/blocks_configs.py @@ -0,0 +1,910 @@ +XRAY_CONFIG = { + "version": "3.40.1", + "mode": "blocks", + "dev_mode": True, + "analytics_enabled": False, + "components": [ + { + "id": 31, + "type": "markdown", + "props": { + "value": "

Detect Disease From Scan

\n

With this model you can lorem ipsum

\n
    \n
  • ipsum 1
  • \n
  • ipsum 2
  • \n
\n", + "visible": True, + "rtl": False, + "name": "markdown", + }, + "serializer": "StringSerializable", + "api_info": {"info": {"type": "string"}, "serialized_info": False}, + "example_inputs": {"raw": "Howdy!", "serialized": "Howdy!"}, + }, + { + "id": 32, + "type": "checkboxgroup", + "props": { + "choices": [ + ("Covid", "Covid"), + ("Malaria", "Malaria"), + ("Lung Cancer", "Lung Cancer"), + ], + "value": [], + "type": "value", + "label": "Disease to Scan For", + "show_label": True, + "container": True, + "min_width": 160, + "visible": True, + "name": "checkboxgroup", + "selectable": False, + }, + "serializer": "ListStringSerializable", + "api_info": { + "info": {"type": "array", "items": {"type": "string"}}, + "serialized_info": False, + }, + "example_inputs": {"raw": ["Covid"], "serialized": ["Covid"]}, + }, + { + "id": 33, + "type": "tabs", + "props": {"visible": True, "name": "tabs", "selectable": False}, + }, + { + "id": 34, + "type": "tabitem", + "props": {"label": "X-ray", "name": "tabitem", "selectable": False}, + }, + { + "id": 35, + "type": "row", + "props": { + "variant": "default", + "visible": True, + "equal_height": True, + "name": "row", + }, + }, + { + "id": 36, + "type": "image", + "props": { + "image_mode": "RGB", + "invert_colors": False, + "source": "upload", + "tool": "editor", + "type": "numpy", + "show_label": True, + "show_download_button": True, + "container": True, + "min_width": 160, + "visible": True, + "streaming": False, + "mirror_webcam": True, + "brush_color": "#000000", + "mask_opacity": 0.7, + "show_share_button": False, + "name": "image", + "selectable": False, + }, + "serializer": "ImgSerializable", + "api_info": { + "info": { + "type": "string", + "description": "base64 representation of an image", + }, + "serialized_info": True, + }, + "example_inputs": { + "raw": 
"data:image/png;base64,R0lGODlhPQBEAPeoAJosM//AwO/AwHVYZ/z595kzAP/s7P+goOXMv8+fhw/v739/f+8PD98fH/8mJl+fn/9ZWb8/PzWlwv///6wWGbImAPgTEMImIN9gUFCEm/gDALULDN8PAD6atYdCTX9gUNKlj8wZAKUsAOzZz+UMAOsJAP/Z2ccMDA8PD/95eX5NWvsJCOVNQPtfX/8zM8+QePLl38MGBr8JCP+zs9myn/8GBqwpAP/GxgwJCPny78lzYLgjAJ8vAP9fX/+MjMUcAN8zM/9wcM8ZGcATEL+QePdZWf/29uc/P9cmJu9MTDImIN+/r7+/vz8/P8VNQGNugV8AAF9fX8swMNgTAFlDOICAgPNSUnNWSMQ5MBAQEJE3QPIGAM9AQMqGcG9vb6MhJsEdGM8vLx8fH98AANIWAMuQeL8fABkTEPPQ0OM5OSYdGFl5jo+Pj/+pqcsTE78wMFNGQLYmID4dGPvd3UBAQJmTkP+8vH9QUK+vr8ZWSHpzcJMmILdwcLOGcHRQUHxwcK9PT9DQ0O/v70w5MLypoG8wKOuwsP/g4P/Q0IcwKEswKMl8aJ9fX2xjdOtGRs/Pz+Dg4GImIP8gIH0sKEAwKKmTiKZ8aB/f39Wsl+LFt8dgUE9PT5x5aHBwcP+AgP+WltdgYMyZfyywz78AAAAAAAD///8AAP9mZv///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAKgALAAAAAA9AEQAAAj/AFEJHEiwoMGDCBMqXMiwocAbBww4nEhxoYkUpzJGrMixogkfGUNqlNixJEIDB0SqHGmyJSojM1bKZOmyop0gM3Oe2liTISKMOoPy7GnwY9CjIYcSRYm0aVKSLmE6nfq05QycVLPuhDrxBlCtYJUqNAq2bNWEBj6ZXRuyxZyDRtqwnXvkhACDV+euTeJm1Ki7A73qNWtFiF+/gA95Gly2CJLDhwEHMOUAAuOpLYDEgBxZ4GRTlC1fDnpkM+fOqD6DDj1aZpITp0dtGCDhr+fVuCu3zlg49ijaokTZTo27uG7Gjn2P+hI8+PDPERoUB318bWbfAJ5sUNFcuGRTYUqV/3ogfXp1rWlMc6awJjiAAd2fm4ogXjz56aypOoIde4OE5u/F9x199dlXnnGiHZWEYbGpsAEA3QXYnHwEFliKAgswgJ8LPeiUXGwedCAKABACCN+EA1pYIIYaFlcDhytd51sGAJbo3onOpajiihlO92KHGaUXGwWjUBChjSPiWJuOO/LYIm4v1tXfE6J4gCSJEZ7YgRYUNrkji9P55sF/ogxw5ZkSqIDaZBV6aSGYq/lGZplndkckZ98xoICbTcIJGQAZcNmdmUc210hs35nCyJ58fgmIKX5RQGOZowxaZwYA+JaoKQwswGijBV4C6SiTUmpphMspJx9unX4KaimjDv9aaXOEBteBqmuuxgEHoLX6Kqx+yXqqBANsgCtit4FWQAEkrNbpq7HSOmtwag5w57GrmlJBASEU18ADjUYb3ADTinIttsgSB1oJFfA63bduimuqKB1keqwUhoCSK374wbujvOSu4QG6UvxBRydcpKsav++Ca6G8A6Pr1x2kVMyHwsVxUALDq/krnrhPSOzXG1lUTIoffqGR7Goi2MAxbv6O2kEG56I7CSlRs
EFKFVyovDJoIRTg7sugNRDGqCJzJgcKE0ywc0ELm6KBCCJo8DIPFeCWNGcyqNFE06ToAfV0HBRgxsvLThHn1oddQMrXj5DyAQgjEHSAJMWZwS3HPxT/QMbabI/iBCliMLEJKX2EEkomBAUCxRi42VDADxyTYDVogV+wSChqmKxEKCDAYFDFj4OmwbY7bDGdBhtrnTQYOigeChUmc1K3QTnAUfEgGFgAWt88hKA6aCRIXhxnQ1yg3BCayK44EWdkUQcBByEQChFXfCB776aQsG0BIlQgQgE8qO26X1h8cEUep8ngRBnOy74E9QgRgEAC8SvOfQkh7FDBDmS43PmGoIiKUUEGkMEC/PJHgxw0xH74yx/3XnaYRJgMB8obxQW6kL9QYEJ0FIFgByfIL7/IQAlvQwEpnAC7DtLNJCKUoO/w45c44GwCXiAFB/OXAATQryUxdN4LfFiwgjCNYg+kYMIEFkCKDs6PKAIJouyGWMS1FSKJOMRB/BoIxYJIUXFUxNwoIkEKPAgCBZSQHQ1A2EWDfDEUVLyADj5AChSIQW6gu10bE/JG2VnCZGfo4R4d0sdQoBAHhPjhIB94v/wRoRKQWGRHgrhGSQJxCS+0pCZbEhAAOw==", + "serialized": "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png", + }, + }, + { + "id": 37, + "type": "json", + "props": { + "show_label": True, + "container": True, + "min_width": 160, + "visible": True, + "name": "json", + }, + "serializer": "JSONSerializable", + "api_info": { + "info": {"type": {}, "description": "any valid json"}, + "serialized_info": True, + }, + "example_inputs": {"raw": {"a": 1, "b": 2}, "serialized": None}, + }, + { + "id": 38, + "type": "button", + "props": { + "value": "Run", + "variant": "secondary", + "visible": True, + "interactive": True, + "name": "button", + }, + "serializer": "StringSerializable", + "api_info": {"info": {"type": "string"}, "serialized_info": False}, + "example_inputs": {"raw": "Howdy!", "serialized": "Howdy!"}, + }, + { + "id": 39, + "type": "tabitem", + "props": {"label": "CT Scan", "name": "tabitem", "selectable": False}, + }, + { + "id": 40, + "type": "row", + "props": { + "variant": "default", + "visible": True, + "equal_height": True, + "name": "row", + }, + }, + { + "id": 41, + "type": "image", + "props": { + "image_mode": "RGB", + "invert_colors": False, + "source": "upload", + "tool": "editor", + "type": "numpy", + "show_label": True, + "show_download_button": True, + "container": True, + "min_width": 160, + "visible": True, + "streaming": False, + 
"mirror_webcam": True, + "brush_color": "#000000", + "mask_opacity": 0.7, + "show_share_button": False, + "name": "image", + "selectable": False, + }, + "serializer": "ImgSerializable", + "api_info": { + "info": { + "type": "string", + "description": "base64 representation of an image", + }, + "serialized_info": True, + }, + "example_inputs": { + "raw": "data:image/png;base64,R0lGODlhPQBEAPeoAJosM//AwO/AwHVYZ/z595kzAP/s7P+goOXMv8+fhw/v739/f+8PD98fH/8mJl+fn/9ZWb8/PzWlwv///6wWGbImAPgTEMImIN9gUFCEm/gDALULDN8PAD6atYdCTX9gUNKlj8wZAKUsAOzZz+UMAOsJAP/Z2ccMDA8PD/95eX5NWvsJCOVNQPtfX/8zM8+QePLl38MGBr8JCP+zs9myn/8GBqwpAP/GxgwJCPny78lzYLgjAJ8vAP9fX/+MjMUcAN8zM/9wcM8ZGcATEL+QePdZWf/29uc/P9cmJu9MTDImIN+/r7+/vz8/P8VNQGNugV8AAF9fX8swMNgTAFlDOICAgPNSUnNWSMQ5MBAQEJE3QPIGAM9AQMqGcG9vb6MhJsEdGM8vLx8fH98AANIWAMuQeL8fABkTEPPQ0OM5OSYdGFl5jo+Pj/+pqcsTE78wMFNGQLYmID4dGPvd3UBAQJmTkP+8vH9QUK+vr8ZWSHpzcJMmILdwcLOGcHRQUHxwcK9PT9DQ0O/v70w5MLypoG8wKOuwsP/g4P/Q0IcwKEswKMl8aJ9fX2xjdOtGRs/Pz+Dg4GImIP8gIH0sKEAwKKmTiKZ8aB/f39Wsl+LFt8dgUE9PT5x5aHBwcP+AgP+WltdgYMyZfyywz78AAAAAAAD///8AAP9mZv///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAKgALAAAAAA9AEQAAAj/AFEJHEiwoMGDCBMqXMiwocAbBww4nEhxoYkUpzJGrMixogkfGUNqlNixJEIDB0SqHGmyJSojM1bKZOmyop0gM3Oe2liTISKMOoPy7GnwY9CjIYcSRYm0aVKSLmE6nfq05QycVLPuhDrxBlCtYJUqNAq2bNWEBj6ZXRuyxZyDRtqwnXvkhACDV+euTeJm1Ki7A73qNWtFiF+/gA95Gly2CJLDhwEHMOUAAuOpLYDEgBxZ4GRTlC1fDnpkM+fOqD6DDj1aZpITp0dtGCDhr+fVuCu3zlg49ijaokTZTo27uG7Gjn2P+hI8+PDPERoUB318bWbfAJ5sUNFcuGRTYUqV/3ogfXp1rWlMc6awJjiAAd2fm4ogXjz56aypOoIde4OE5u/F9x199dlXnnGiHZWEYbGpsAEA3QXYnHwEFliKAgswgJ8LPeiUXGwedCAKABACCN+EA1pYIIYaFlcDhytd51sGAJbo3onOpajiihlO92KHGaUXGwWjUBChjSPiWJuOO/LYIm4v1tXfE6J4gCSJEZ7YgRYUNrkji9P55sF/o
gxw5ZkSqIDaZBV6aSGYq/lGZplndkckZ98xoICbTcIJGQAZcNmdmUc210hs35nCyJ58fgmIKX5RQGOZowxaZwYA+JaoKQwswGijBV4C6SiTUmpphMspJx9unX4KaimjDv9aaXOEBteBqmuuxgEHoLX6Kqx+yXqqBANsgCtit4FWQAEkrNbpq7HSOmtwag5w57GrmlJBASEU18ADjUYb3ADTinIttsgSB1oJFfA63bduimuqKB1keqwUhoCSK374wbujvOSu4QG6UvxBRydcpKsav++Ca6G8A6Pr1x2kVMyHwsVxUALDq/krnrhPSOzXG1lUTIoffqGR7Goi2MAxbv6O2kEG56I7CSlRsEFKFVyovDJoIRTg7sugNRDGqCJzJgcKE0ywc0ELm6KBCCJo8DIPFeCWNGcyqNFE06ToAfV0HBRgxsvLThHn1oddQMrXj5DyAQgjEHSAJMWZwS3HPxT/QMbabI/iBCliMLEJKX2EEkomBAUCxRi42VDADxyTYDVogV+wSChqmKxEKCDAYFDFj4OmwbY7bDGdBhtrnTQYOigeChUmc1K3QTnAUfEgGFgAWt88hKA6aCRIXhxnQ1yg3BCayK44EWdkUQcBByEQChFXfCB776aQsG0BIlQgQgE8qO26X1h8cEUep8ngRBnOy74E9QgRgEAC8SvOfQkh7FDBDmS43PmGoIiKUUEGkMEC/PJHgxw0xH74yx/3XnaYRJgMB8obxQW6kL9QYEJ0FIFgByfIL7/IQAlvQwEpnAC7DtLNJCKUoO/w45c44GwCXiAFB/OXAATQryUxdN4LfFiwgjCNYg+kYMIEFkCKDs6PKAIJouyGWMS1FSKJOMRB/BoIxYJIUXFUxNwoIkEKPAgCBZSQHQ1A2EWDfDEUVLyADj5AChSIQW6gu10bE/JG2VnCZGfo4R4d0sdQoBAHhPjhIB94v/wRoRKQWGRHgrhGSQJxCS+0pCZbEhAAOw==", + "serialized": "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png", + }, + }, + { + "id": 42, + "type": "json", + "props": { + "show_label": True, + "container": True, + "min_width": 160, + "visible": True, + "name": "json", + }, + "serializer": "JSONSerializable", + "api_info": { + "info": {"type": {}, "description": "any valid json"}, + "serialized_info": True, + }, + "example_inputs": {"raw": {"a": 1, "b": 2}, "serialized": None}, + }, + { + "id": 43, + "type": "button", + "props": { + "value": "Run", + "variant": "secondary", + "visible": True, + "interactive": True, + "name": "button", + }, + "serializer": "StringSerializable", + "api_info": {"info": {"type": "string"}, "serialized_info": False}, + "example_inputs": {"raw": "Howdy!", "serialized": "Howdy!"}, + }, + { + "id": 44, + "type": "textbox", + "props": { + "value": "", + "lines": 1, + "max_lines": 20, + "show_label": True, + "container": True, + "min_width": 160, + "visible": True, + "autofocus": False, 
+ "type": "text", + "rtl": False, + "show_copy_button": False, + "name": "textbox", + "selectable": False, + }, + "serializer": "StringSerializable", + "api_info": {"info": {"type": "string"}, "serialized_info": False}, + "example_inputs": {"raw": "Howdy!", "serialized": "Howdy!"}, + }, + { + "id": 45, + "type": "form", + "props": {"scale": 0, "min_width": 0, "name": "form"}, + }, + { + "id": 46, + "type": "form", + "props": {"scale": 0, "min_width": 0, "name": "form"}, + }, + ], + "css": None, + "title": "Gradio", + "space_id": None, + "enable_queue": None, + "show_error": True, + "show_api": True, + "is_colab": False, + "stylesheets": [ + "https://fonts.googleapis.com/css2?family=Source+Sans+Pro:wght@400;600&display=swap", + "https://fonts.googleapis.com/css2?family=IBM+Plex+Mono:wght@400;600&display=swap", + ], + "theme": "default", + "layout": { + "id": 30, + "children": [ + {"id": 31}, + {"id": 45, "children": [{"id": 32}]}, + { + "id": 33, + "children": [ + { + "id": 34, + "children": [ + {"id": 35, "children": [{"id": 36}, {"id": 37}]}, + {"id": 38}, + ], + }, + { + "id": 39, + "children": [ + {"id": 40, "children": [{"id": 41}, {"id": 42}]}, + {"id": 43}, + ], + }, + ], + }, + {"id": 46, "children": [{"id": 44}]}, + ], + }, + "dependencies": [ + { + "targets": [38], + "trigger": "click", + "inputs": [32, 36], + "outputs": [37], + "backend_fn": True, + "js": None, + "queue": None, + "api_name": None, + "scroll_to_output": False, + "show_progress": "full", + "every": None, + "batch": False, + "max_batch_size": 4, + "cancels": [], + "types": {"continuous": False, "generator": False}, + "collects_event_data": False, + "trigger_after": None, + "trigger_only_on_success": False, + }, + { + "targets": [43], + "trigger": "click", + "inputs": [32, 41], + "outputs": [42], + "backend_fn": True, + "js": None, + "queue": None, + "api_name": None, + "scroll_to_output": False, + "show_progress": "full", + "every": None, + "batch": False, + "max_batch_size": 4, + "cancels": 
[], + "types": {"continuous": False, "generator": False}, + "collects_event_data": False, + "trigger_after": None, + "trigger_only_on_success": False, + }, + { + "targets": [], + "trigger": "load", + "inputs": [], + "outputs": [44], + "backend_fn": True, + "js": None, + "queue": None, + "api_name": None, + "scroll_to_output": False, + "show_progress": "full", + "every": None, + "batch": False, + "max_batch_size": 4, + "cancels": [], + "types": {"continuous": False, "generator": False}, + "collects_event_data": False, + "trigger_after": None, + "trigger_only_on_success": False, + }, + ], +} + +XRAY_CONFIG_DIFF_IDS = { + "version": "3.32.0\n", + "mode": "blocks", + "dev_mode": True, + "analytics_enabled": False, + "components": [ + { + "id": 1, + "type": "markdown", + "props": { + "value": "

Detect Disease From Scan

\n

With this model you can lorem ipsum

\n
    \n
  • ipsum 1
  • \n
  • ipsum 2
  • \n
\n", + "visible": True, + "rtl": False, + "name": "markdown", + }, + "serializer": "StringSerializable", + "api_info": {"info": {"type": "string"}, "serialized_info": False}, + "example_inputs": {"raw": "Howdy!", "serialized": "Howdy!"}, + }, + { + "id": 2, + "type": "checkboxgroup", + "props": { + "choices": [ + ("Covid", "Covid"), + ("Malaria", "Malaria"), + ("Lung Cancer", "Lung Cancer"), + ], + "value": [], + "type": "value", + "label": "Disease to Scan For", + "show_label": True, + "container": True, + "min_width": 160, + "visible": True, + "name": "checkboxgroup", + "selectable": False, + }, + "serializer": "ListStringSerializable", + "api_info": { + "info": {"type": "array", "items": {"type": "string"}}, + "serialized_info": False, + }, + "example_inputs": {"raw": ["Covid"], "serialized": ["Covid"]}, + }, + { + "id": 3, + "type": "tabs", + "props": {"visible": True, "name": "tabs", "selectable": False}, + }, + { + "id": 4, + "type": "tabitem", + "props": {"label": "X-ray", "name": "tabitem", "selectable": False}, + }, + { + "id": 5, + "type": "row", + "props": { + "variant": "default", + "visible": True, + "equal_height": True, + "name": "row", + }, + }, + { + "id": 6, + "type": "image", + "props": { + "image_mode": "RGB", + "invert_colors": False, + "source": "upload", + "tool": "editor", + "type": "numpy", + "show_label": True, + "show_download_button": True, + "container": True, + "min_width": 160, + "visible": True, + "streaming": False, + "mirror_webcam": True, + "brush_color": "#000000", + "mask_opacity": 0.7, + "show_share_button": False, + "name": "image", + "selectable": False, + }, + "serializer": "ImgSerializable", + "api_info": { + "info": { + "type": "string", + "description": "base64 representation of an image", + }, + "serialized_info": True, + }, + "example_inputs": { + "raw": 
"data:image/png;base64,R0lGODlhPQBEAPeoAJosM//AwO/AwHVYZ/z595kzAP/s7P+goOXMv8+fhw/v739/f+8PD98fH/8mJl+fn/9ZWb8/PzWlwv///6wWGbImAPgTEMImIN9gUFCEm/gDALULDN8PAD6atYdCTX9gUNKlj8wZAKUsAOzZz+UMAOsJAP/Z2ccMDA8PD/95eX5NWvsJCOVNQPtfX/8zM8+QePLl38MGBr8JCP+zs9myn/8GBqwpAP/GxgwJCPny78lzYLgjAJ8vAP9fX/+MjMUcAN8zM/9wcM8ZGcATEL+QePdZWf/29uc/P9cmJu9MTDImIN+/r7+/vz8/P8VNQGNugV8AAF9fX8swMNgTAFlDOICAgPNSUnNWSMQ5MBAQEJE3QPIGAM9AQMqGcG9vb6MhJsEdGM8vLx8fH98AANIWAMuQeL8fABkTEPPQ0OM5OSYdGFl5jo+Pj/+pqcsTE78wMFNGQLYmID4dGPvd3UBAQJmTkP+8vH9QUK+vr8ZWSHpzcJMmILdwcLOGcHRQUHxwcK9PT9DQ0O/v70w5MLypoG8wKOuwsP/g4P/Q0IcwKEswKMl8aJ9fX2xjdOtGRs/Pz+Dg4GImIP8gIH0sKEAwKKmTiKZ8aB/f39Wsl+LFt8dgUE9PT5x5aHBwcP+AgP+WltdgYMyZfyywz78AAAAAAAD///8AAP9mZv///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAKgALAAAAAA9AEQAAAj/AFEJHEiwoMGDCBMqXMiwocAbBww4nEhxoYkUpzJGrMixogkfGUNqlNixJEIDB0SqHGmyJSojM1bKZOmyop0gM3Oe2liTISKMOoPy7GnwY9CjIYcSRYm0aVKSLmE6nfq05QycVLPuhDrxBlCtYJUqNAq2bNWEBj6ZXRuyxZyDRtqwnXvkhACDV+euTeJm1Ki7A73qNWtFiF+/gA95Gly2CJLDhwEHMOUAAuOpLYDEgBxZ4GRTlC1fDnpkM+fOqD6DDj1aZpITp0dtGCDhr+fVuCu3zlg49ijaokTZTo27uG7Gjn2P+hI8+PDPERoUB318bWbfAJ5sUNFcuGRTYUqV/3ogfXp1rWlMc6awJjiAAd2fm4ogXjz56aypOoIde4OE5u/F9x199dlXnnGiHZWEYbGpsAEA3QXYnHwEFliKAgswgJ8LPeiUXGwedCAKABACCN+EA1pYIIYaFlcDhytd51sGAJbo3onOpajiihlO92KHGaUXGwWjUBChjSPiWJuOO/LYIm4v1tXfE6J4gCSJEZ7YgRYUNrkji9P55sF/ogxw5ZkSqIDaZBV6aSGYq/lGZplndkckZ98xoICbTcIJGQAZcNmdmUc210hs35nCyJ58fgmIKX5RQGOZowxaZwYA+JaoKQwswGijBV4C6SiTUmpphMspJx9unX4KaimjDv9aaXOEBteBqmuuxgEHoLX6Kqx+yXqqBANsgCtit4FWQAEkrNbpq7HSOmtwag5w57GrmlJBASEU18ADjUYb3ADTinIttsgSB1oJFfA63bduimuqKB1keqwUhoCSK374wbujvOSu4QG6UvxBRydcpKsav++Ca6G8A6Pr1x2kVMyHwsVxUALDq/krnrhPSOzXG1lUTIoffqGR7Goi2MAxbv6O2kEG56I7CSlRs
EFKFVyovDJoIRTg7sugNRDGqCJzJgcKE0ywc0ELm6KBCCJo8DIPFeCWNGcyqNFE06ToAfV0HBRgxsvLThHn1oddQMrXj5DyAQgjEHSAJMWZwS3HPxT/QMbabI/iBCliMLEJKX2EEkomBAUCxRi42VDADxyTYDVogV+wSChqmKxEKCDAYFDFj4OmwbY7bDGdBhtrnTQYOigeChUmc1K3QTnAUfEgGFgAWt88hKA6aCRIXhxnQ1yg3BCayK44EWdkUQcBByEQChFXfCB776aQsG0BIlQgQgE8qO26X1h8cEUep8ngRBnOy74E9QgRgEAC8SvOfQkh7FDBDmS43PmGoIiKUUEGkMEC/PJHgxw0xH74yx/3XnaYRJgMB8obxQW6kL9QYEJ0FIFgByfIL7/IQAlvQwEpnAC7DtLNJCKUoO/w45c44GwCXiAFB/OXAATQryUxdN4LfFiwgjCNYg+kYMIEFkCKDs6PKAIJouyGWMS1FSKJOMRB/BoIxYJIUXFUxNwoIkEKPAgCBZSQHQ1A2EWDfDEUVLyADj5AChSIQW6gu10bE/JG2VnCZGfo4R4d0sdQoBAHhPjhIB94v/wRoRKQWGRHgrhGSQJxCS+0pCZbEhAAOw==", + "serialized": "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png", + }, + }, + { + "id": 7, + "type": "json", + "props": { + "show_label": True, + "container": True, + "min_width": 160, + "visible": True, + "name": "json", + }, + "serializer": "JSONSerializable", + "api_info": { + "info": {"type": {}, "description": "any valid json"}, + "serialized_info": True, + }, + "example_inputs": {"raw": {"a": 1, "b": 2}, "serialized": None}, + }, + { + "id": 8, + "type": "button", + "props": { + "value": "Run", + "variant": "secondary", + "visible": True, + "interactive": True, + "name": "button", + }, + "serializer": "StringSerializable", + "api_info": {"info": {"type": "string"}, "serialized_info": False}, + "example_inputs": {"raw": "Howdy!", "serialized": "Howdy!"}, + }, + { + "id": 9, + "type": "tabitem", + "props": {"label": "CT Scan", "name": "tabitem", "selectable": False}, + }, + { + "id": 10, + "type": "row", + "props": { + "variant": "default", + "visible": True, + "equal_height": True, + "name": "row", + }, + }, + { + "id": 11, + "type": "image", + "props": { + "image_mode": "RGB", + "invert_colors": False, + "source": "upload", + "tool": "editor", + "type": "numpy", + "show_label": True, + "show_download_button": True, + "container": True, + "min_width": 160, + "visible": True, + "streaming": False, + 
"mirror_webcam": True, + "brush_color": "#000000", + "mask_opacity": 0.7, + "show_share_button": False, + "name": "image", + "selectable": False, + }, + "serializer": "ImgSerializable", + "api_info": { + "info": { + "type": "string", + "description": "base64 representation of an image", + }, + "serialized_info": True, + }, + "example_inputs": { + "raw": "data:image/png;base64,R0lGODlhPQBEAPeoAJosM//AwO/AwHVYZ/z595kzAP/s7P+goOXMv8+fhw/v739/f+8PD98fH/8mJl+fn/9ZWb8/PzWlwv///6wWGbImAPgTEMImIN9gUFCEm/gDALULDN8PAD6atYdCTX9gUNKlj8wZAKUsAOzZz+UMAOsJAP/Z2ccMDA8PD/95eX5NWvsJCOVNQPtfX/8zM8+QePLl38MGBr8JCP+zs9myn/8GBqwpAP/GxgwJCPny78lzYLgjAJ8vAP9fX/+MjMUcAN8zM/9wcM8ZGcATEL+QePdZWf/29uc/P9cmJu9MTDImIN+/r7+/vz8/P8VNQGNugV8AAF9fX8swMNgTAFlDOICAgPNSUnNWSMQ5MBAQEJE3QPIGAM9AQMqGcG9vb6MhJsEdGM8vLx8fH98AANIWAMuQeL8fABkTEPPQ0OM5OSYdGFl5jo+Pj/+pqcsTE78wMFNGQLYmID4dGPvd3UBAQJmTkP+8vH9QUK+vr8ZWSHpzcJMmILdwcLOGcHRQUHxwcK9PT9DQ0O/v70w5MLypoG8wKOuwsP/g4P/Q0IcwKEswKMl8aJ9fX2xjdOtGRs/Pz+Dg4GImIP8gIH0sKEAwKKmTiKZ8aB/f39Wsl+LFt8dgUE9PT5x5aHBwcP+AgP+WltdgYMyZfyywz78AAAAAAAD///8AAP9mZv///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAKgALAAAAAA9AEQAAAj/AFEJHEiwoMGDCBMqXMiwocAbBww4nEhxoYkUpzJGrMixogkfGUNqlNixJEIDB0SqHGmyJSojM1bKZOmyop0gM3Oe2liTISKMOoPy7GnwY9CjIYcSRYm0aVKSLmE6nfq05QycVLPuhDrxBlCtYJUqNAq2bNWEBj6ZXRuyxZyDRtqwnXvkhACDV+euTeJm1Ki7A73qNWtFiF+/gA95Gly2CJLDhwEHMOUAAuOpLYDEgBxZ4GRTlC1fDnpkM+fOqD6DDj1aZpITp0dtGCDhr+fVuCu3zlg49ijaokTZTo27uG7Gjn2P+hI8+PDPERoUB318bWbfAJ5sUNFcuGRTYUqV/3ogfXp1rWlMc6awJjiAAd2fm4ogXjz56aypOoIde4OE5u/F9x199dlXnnGiHZWEYbGpsAEA3QXYnHwEFliKAgswgJ8LPeiUXGwedCAKABACCN+EA1pYIIYaFlcDhytd51sGAJbo3onOpajiihlO92KHGaUXGwWjUBChjSPiWJuOO/LYIm4v1tXfE6J4gCSJEZ7YgRYUNrkji9P55sF/o
gxw5ZkSqIDaZBV6aSGYq/lGZplndkckZ98xoICbTcIJGQAZcNmdmUc210hs35nCyJ58fgmIKX5RQGOZowxaZwYA+JaoKQwswGijBV4C6SiTUmpphMspJx9unX4KaimjDv9aaXOEBteBqmuuxgEHoLX6Kqx+yXqqBANsgCtit4FWQAEkrNbpq7HSOmtwag5w57GrmlJBASEU18ADjUYb3ADTinIttsgSB1oJFfA63bduimuqKB1keqwUhoCSK374wbujvOSu4QG6UvxBRydcpKsav++Ca6G8A6Pr1x2kVMyHwsVxUALDq/krnrhPSOzXG1lUTIoffqGR7Goi2MAxbv6O2kEG56I7CSlRsEFKFVyovDJoIRTg7sugNRDGqCJzJgcKE0ywc0ELm6KBCCJo8DIPFeCWNGcyqNFE06ToAfV0HBRgxsvLThHn1oddQMrXj5DyAQgjEHSAJMWZwS3HPxT/QMbabI/iBCliMLEJKX2EEkomBAUCxRi42VDADxyTYDVogV+wSChqmKxEKCDAYFDFj4OmwbY7bDGdBhtrnTQYOigeChUmc1K3QTnAUfEgGFgAWt88hKA6aCRIXhxnQ1yg3BCayK44EWdkUQcBByEQChFXfCB776aQsG0BIlQgQgE8qO26X1h8cEUep8ngRBnOy74E9QgRgEAC8SvOfQkh7FDBDmS43PmGoIiKUUEGkMEC/PJHgxw0xH74yx/3XnaYRJgMB8obxQW6kL9QYEJ0FIFgByfIL7/IQAlvQwEpnAC7DtLNJCKUoO/w45c44GwCXiAFB/OXAATQryUxdN4LfFiwgjCNYg+kYMIEFkCKDs6PKAIJouyGWMS1FSKJOMRB/BoIxYJIUXFUxNwoIkEKPAgCBZSQHQ1A2EWDfDEUVLyADj5AChSIQW6gu10bE/JG2VnCZGfo4R4d0sdQoBAHhPjhIB94v/wRoRKQWGRHgrhGSQJxCS+0pCZbEhAAOw==", + "serialized": "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png", + }, + }, + { + "id": 12, + "type": "json", + "props": { + "show_label": True, + "container": True, + "min_width": 160, + "visible": True, + "name": "json", + }, + "serializer": "JSONSerializable", + "api_info": { + "info": {"type": {}, "description": "any valid json"}, + "serialized_info": True, + }, + "example_inputs": {"raw": {"a": 1, "b": 2}, "serialized": None}, + }, + { + "id": 13, + "type": "button", + "props": { + "value": "Run", + "variant": "secondary", + "visible": True, + "interactive": True, + "name": "button", + }, + "serializer": "StringSerializable", + "api_info": {"info": {"type": "string"}, "serialized_info": False}, + "example_inputs": {"raw": "Howdy!", "serialized": "Howdy!"}, + }, + { + "id": 14, + "type": "textbox", + "props": { + "value": "", + "lines": 1, + "max_lines": 20, + "show_label": True, + "container": True, + "min_width": 160, + "visible": True, + "autofocus": False, 
+ "type": "text", + "rtl": False, + "show_copy_button": False, + "name": "textbox", + "selectable": False, + }, + "serializer": "StringSerializable", + "api_info": {"info": {"type": "string"}, "serialized_info": False}, + "example_inputs": {"raw": "Howdy!", "serialized": "Howdy!"}, + }, + { + "id": 15, + "type": "form", + "props": {"scale": 0, "min_width": 0, "name": "form"}, + }, + { + "id": 16, + "type": "form", + "props": {"scale": 0, "min_width": 0, "name": "form"}, + }, + ], + "css": None, + "title": "Gradio", + "space_id": None, + "enable_queue": None, + "show_error": True, + "show_api": True, + "is_colab": False, + "stylesheets": [ + "https://fonts.googleapis.com/css2?family=Source+Sans+Pro:wght@400;600&display=swap", + "https://fonts.googleapis.com/css2?family=IBM+Plex+Mono:wght@400;600&display=swap", + ], + "theme": "default", + "layout": { + "id": 0, + "children": [ + {"id": 1}, + {"id": 15, "children": [{"id": 2}]}, + { + "id": 3, + "children": [ + { + "id": 4, + "children": [ + {"id": 5, "children": [{"id": 6}, {"id": 7}]}, + {"id": 8}, + ], + }, + { + "id": 9, + "children": [ + {"id": 10, "children": [{"id": 11}, {"id": 12}]}, + {"id": 13}, + ], + }, + ], + }, + {"id": 16, "children": [{"id": 14}]}, + ], + }, + "dependencies": [ + { + "targets": [8], + "trigger": "click", + "inputs": [2, 6], + "outputs": [7], + "backend_fn": True, + "js": None, + "queue": None, + "api_name": None, + "scroll_to_output": False, + "show_progress": "full", + "every": None, + "batch": False, + "max_batch_size": 4, + "cancels": [], + "types": {"continuous": False, "generator": False}, + "collects_event_data": False, + "trigger_after": None, + "trigger_only_on_success": False, + }, + { + "targets": [13], + "trigger": "click", + "inputs": [2, 11], + "outputs": [12], + "backend_fn": True, + "js": None, + "queue": None, + "api_name": None, + "scroll_to_output": False, + "show_progress": "full", + "every": None, + "batch": False, + "max_batch_size": 4, + "cancels": [], + "types": 
{"continuous": False, "generator": False}, + "collects_event_data": False, + "trigger_after": None, + "trigger_only_on_success": False, + }, + { + "targets": [], + "trigger": "load", + "inputs": [], + "outputs": [14], + "backend_fn": True, + "js": None, + "queue": None, + "api_name": None, + "scroll_to_output": False, + "show_progress": "full", + "every": None, + "batch": False, + "max_batch_size": 4, + "cancels": [], + "types": {"continuous": False, "generator": False}, + "collects_event_data": False, + "trigger_after": None, + "trigger_only_on_success": False, + }, + ], +} + + +XRAY_CONFIG_WITH_MISTAKE = { + "mode": "blocks", + "dev_mode": True, + "analytics_enabled": False, + "theme": "default", + "components": [ + { + "id": 1, + "type": "markdown", + "props": { + "value": "

Detect Disease From Scan

\n

With this model you can lorem ipsum

\n
    \n
  • ipsum 1
  • \n
  • ipsum 2
  • \n
\n", + "name": "markdown", + "rtl": False, + }, + }, + { + "id": 2, + "type": "checkboxgroup", + "props": { + "choices": [ + ("Covid", "Covid"), + ("Malaria", "Malaria"), + ("Lung Cancer", "Lung Cancer"), + ], + "value": [], + "name": "checkboxgroup", + "selectable": False, + "show_label": True, + "label": "Disease to Scan For", + "container": True, + "min_width": 160, + }, + }, + { + "id": 3, + "type": "tabs", + "props": { + "value": True, + }, + }, + { + "id": 4, + "type": "tabitem", + "props": { + "label": "X-ray", + "value": True, + }, + }, + { + "id": 5, + "type": "row", + "props": { + "type": "row", + "variant": "default", + "equal_height": True, + "value": True, + }, + }, + { + "id": 6, + "type": "image", + "props": { + "image_mode": "RGB", + "brush_color": "#000000", + "mask_opacity": 0.7, + "source": "upload", + "streaming": False, + "mirror_webcam": True, + "tool": "editor", + "name": "image", + "selectable": False, + "show_share_button": False, + }, + }, + { + "id": 7, + "type": "json", + "props": { + "name": "json", + }, + }, + { + "id": 8, + "type": "button", + "props": { + "value": "Run", + "name": "button", + "interactive": True, + "css": {"background-color": "red", "--hover-color": "orange"}, + "variant": "secondary", + }, + }, + { + "id": 9, + "type": "tabitem", + "props": { + "show_label": True, + "label": "CT Scan", + "value": True, + }, + }, + { + "id": 10, + "type": "row", + "props": { + "type": "row", + "variant": "default", + "equal_height": True, + "value": True, + }, + }, + { + "id": 11, + "type": "image", + "props": { + "image_mode": "RGB", + "brush_color": "#000000", + "mask_opacity": 0.7, + "source": "upload", + "tool": "editor", + "streaming": False, + "mirror_webcam": True, + "name": "image", + "selectable": False, + "show_share_button": False, + }, + }, + { + "id": 12, + "type": "json", + "props": { + "name": "json", + }, + }, + { + "id": 13, + "type": "button", + "props": { + "value": "Run", + "interactive": True, + "name": "button", 
+ "variant": "secondary", + }, + }, + { + "id": 14, + "type": "textbox", + "props": { + "lines": 1, + "value": "", + "name": "textbox", + "selectable": False, + "show_copy_button": False, + "type": "text", + "rtl": False, + "autofocus": False, + }, + }, + ], + "layout": { + "id": 0, + "children": [ + {"id": 1}, + {"id": 2}, + { + "id": 3, + "children": [ + { + "id": 4, + "children": [ + {"id": 5, "children": [{"id": 6}, {"id": 7}]}, + {"id": 8}, + ], + }, + { + "id": 9, + "children": [ + {"id": 10, "children": [{"id": 12}, {"id": 11}]}, + {"id": 13}, + ], + }, + ], + }, + {"id": 14}, + ], + }, + "dependencies": [ + { + "targets": [8], + "trigger": "click", + "inputs": [2, 6], + "outputs": [7], + "api_name": None, + "scroll_to_output": False, + "cancels": [], + "trigger_after": None, + "trigger_only_on_success": False, + "show_progress": "full", + }, + { + "targets": [13], + "trigger": "click", + "inputs": [2, 11], + "outputs": [12], + "api_name": None, + "scroll_to_output": False, + "cancels": [], + "trigger_after": None, + "trigger_only_on_success": False, + "show_progress": "full", + }, + ], +} diff --git a/testbed/gradio-app__gradio/gradio/test_data/flagged_no_log/a.txt b/testbed/gradio-app__gradio/gradio/test_data/flagged_no_log/a.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/gradio-app__gradio/gradio/test_data/flagged_no_log/b.txt b/testbed/gradio-app__gradio/gradio/test_data/flagged_no_log/b.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/gradio-app__gradio/gradio/test_data/flagged_no_log/c.txt b/testbed/gradio-app__gradio/gradio/test_data/flagged_no_log/c.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/gradio-app__gradio/gradio/test_data/flagged_with_log/log.csv 
b/testbed/gradio-app__gradio/gradio/test_data/flagged_with_log/log.csv new file mode 100644 index 0000000000000000000000000000000000000000..f09847b6f72b43bc6191bcb799cbb6d5df04b543 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/test_data/flagged_with_log/log.csv @@ -0,0 +1,3 @@ +input,output +10,20 +30,60 diff --git a/testbed/gradio-app__gradio/gradio/themes/__init__.py b/testbed/gradio-app__gradio/gradio/themes/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f7adbe74eef8ecee7e70d00a759c0fcf807d3185 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/__init__.py @@ -0,0 +1,30 @@ +from gradio.themes.base import Base, ThemeClass +from gradio.themes.default import Default +from gradio.themes.glass import Glass +from gradio.themes.monochrome import Monochrome +from gradio.themes.soft import Soft +from gradio.themes.utils import colors, sizes +from gradio.themes.utils.colors import Color +from gradio.themes.utils.fonts import Font, GoogleFont +from gradio.themes.utils.sizes import Size + +__all__ = [ + "Base", + "Color", + "Default", + "Font", + "Glass", + "GoogleFont", + "Monochrome", + "Size", + "Soft", + "ThemeClass", + "colors", + "sizes", +] + + +def builder(*args, **kwargs): + from gradio.themes.builder_app import demo + + return demo.launch(*args, **kwargs) diff --git a/testbed/gradio-app__gradio/gradio/themes/app.py b/testbed/gradio-app__gradio/gradio/themes/app.py new file mode 100644 index 0000000000000000000000000000000000000000..0c4c5a5e4050d81fc1ba684175f285cfef7670db --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/app.py @@ -0,0 +1,146 @@ +import time + +import gradio as gr +from gradio.themes.utils.theme_dropdown import create_theme_dropdown + +dropdown, js = create_theme_dropdown() + +with gr.Blocks(theme=gr.themes.Default()) as demo: + with gr.Row().style(equal_height=True): + with gr.Column(scale=10): + gr.Markdown( + """ + # Theme preview: `{THEME}` + To use this theme, set 
`theme='{AUTHOR}/{SPACE_NAME}'` in `gr.Blocks()` or `gr.Interface()`. + You can append an `@` and a semantic version expression, e.g. @>=1.0.0,<2.0.0 to pin to a given version + of this theme. + """ + ) + with gr.Column(scale=3): + with gr.Box(): + dropdown.render() + toggle_dark = gr.Button(value="Toggle Dark").style(full_width=True) + + dropdown.change(None, dropdown, None, _js=js) + toggle_dark.click( + None, + _js=""" + () => { + document.body.classList.toggle('dark'); + } + """, + ) + + name = gr.Textbox( + label="Name", + info="Full name, including middle name. No special characters.", + placeholder="John Doe", + value="John Doe", + interactive=True, + ) + + with gr.Row(): + slider1 = gr.Slider(label="Slider 1") + slider2 = gr.Slider(label="Slider 2") + gr.CheckboxGroup(["A", "B", "C"], label="Checkbox Group") + + with gr.Row(): + with gr.Column(variant="panel", scale=1): + gr.Markdown("## Panel 1") + radio = gr.Radio( + ["A", "B", "C"], + label="Radio", + info="Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.", + ) + drop = gr.Dropdown(["Option 1", "Option 2", "Option 3"], show_label=False) + drop_2 = gr.Dropdown( + ["Option A", "Option B", "Option C"], + multiselect=True, + value=["Option A"], + label="Dropdown", + interactive=True, + ) + check = gr.Checkbox(label="Go") + with gr.Column(variant="panel", scale=2): + img = gr.Image( + "https://gradio-static-files.s3.us-west-2.amazonaws.com/header-image.jpg", + label="Image", + ).style(height=320) + with gr.Row(): + go_btn = gr.Button("Go", label="Primary Button", variant="primary") + clear_btn = gr.Button( + "Clear", label="Secondary Button", variant="secondary" + ) + + def go(*args): + time.sleep(3) + return "https://gradio-static-files.s3.us-west-2.amazonaws.com/header-image.jpgjpg" + + go_btn.click(go, [radio, drop, drop_2, check, name], img, api_name="go") + + def clear(): + time.sleep(0.2) + return None + + clear_btn.click(clear, None, img) + + with gr.Row(): + btn1 = gr.Button("Button 1").style(size="sm") + btn2 = gr.UploadButton().style(size="sm") + stop_btn = gr.Button("Stop", label="Stop Button", variant="stop").style( + size="sm" + ) + + with gr.Row(): + gr.Dataframe(value=[[1, 2, 3], [4, 5, 6], [7, 8, 9]], label="Dataframe") + gr.JSON( + value={"a": 1, "b": 2, "c": {"test": "a", "test2": [1, 2, 3]}}, label="JSON" + ) + gr.Label(value={"cat": 0.7, "dog": 0.2, "fish": 0.1}) + gr.File() + with gr.Row(): + gr.ColorPicker() + gr.Video("https://gradio-static-files.s3.us-west-2.amazonaws.com/world.mp4") + gr.Gallery( + [ + ( + "https://gradio-static-files.s3.us-west-2.amazonaws.com/lion.jpg", + "lion", + ), + ( + "https://gradio-static-files.s3.us-west-2.amazonaws.com/logo.png", + "logo", + ), + ( + "https://gradio-static-files.s3.us-west-2.amazonaws.com/tower.jpg", + "tower", + ), + ] + ).style(height="200px", grid=2) + + with gr.Row(): + with gr.Column(scale=2): + chatbot = gr.Chatbot([("Hello", "Hi")], 
label="Chatbot") + chat_btn = gr.Button("Add messages") + + def chat(history): + time.sleep(2) + yield [["How are you?", "I am good."]] + + chat_btn.click( + lambda history: history + + [["How are you?", "I am good."]] + + (time.sleep(2) or []), + chatbot, + chatbot, + ) + with gr.Column(scale=1): + with gr.Accordion("Advanced Settings"): + gr.Markdown("Hello") + gr.Number(label="Chatbot control 1") + gr.Number(label="Chatbot control 2") + gr.Number(label="Chatbot control 3") + + +if __name__ == "__main__": + demo.queue().launch() diff --git a/testbed/gradio-app__gradio/gradio/themes/base.py b/testbed/gradio-app__gradio/gradio/themes/base.py new file mode 100644 index 0000000000000000000000000000000000000000..978155a4bca04e0d144476e6bd082205994ddb33 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/base.py @@ -0,0 +1,1828 @@ +from __future__ import annotations + +import json +import re +import tempfile +import textwrap +from pathlib import Path +from typing import Iterable + +import huggingface_hub +import requests +import semantic_version as semver +from gradio_client.documentation import document, set_documentation_group +from huggingface_hub import CommitOperationAdd + +from gradio.themes.utils import ( + colors, + fonts, + get_matching_version, + get_theme_assets, + sizes, +) +from gradio.themes.utils.readme_content import README_CONTENT + +set_documentation_group("themes") + + +class ThemeClass: + def __init__(self): + self._stylesheets = [] + self.name = None + + def _get_theme_css(self): + css = {} + dark_css = {} + + for attr, val in self.__dict__.items(): + if attr.startswith("_"): + continue + if val is None: + if attr.endswith("_dark"): + dark_css[attr[:-5]] = None + continue + else: + raise ValueError( + f"Cannot set '{attr}' to None - only dark mode variables can be None." 
+ ) + val = str(val) + pattern = r"(\*)([\w_]+)(\b)" + + def repl_func(match): + full_match = match.group(0) + if full_match.startswith("*") and full_match.endswith("_dark"): + raise ValueError( + f"Cannot refer '{attr}' to '{val}' - dark variable references are automatically used for dark mode attributes, so do not use the _dark suffix in the value." + ) + if ( + attr.endswith("_dark") + and full_match.startswith("*") + and attr[:-5] == full_match[1:] + ): + raise ValueError( + f"Cannot refer '{attr}' to '{val}' - if dark and light mode values are the same, set dark mode version to None." + ) + + word = match.group(2) + word = word.replace("_", "-") + return f"var(--{word})" + + val = re.sub(pattern, repl_func, val) + + attr = attr.replace("_", "-") + + if attr.endswith("-dark"): + attr = attr[:-5] + dark_css[attr] = val + else: + css[attr] = val + + for attr, val in css.items(): + if attr not in dark_css: + dark_css[attr] = val + + css_code = ( + ":root {\n" + + "\n".join([f" --{attr}: {val};" for attr, val in css.items()]) + + "\n}" + ) + dark_css_code = ( + ".dark {\n" + + "\n".join([f" --{attr}: {val};" for attr, val in dark_css.items()]) + + "\n}" + ) + + return f"{css_code}\n{dark_css_code}" + + def to_dict(self): + """Convert the theme into a python dictionary.""" + schema = {"theme": {}} + for prop in dir(self): + if ( + not prop.startswith("_") + or prop.startswith("_font") + or prop == "_stylesheets" + or prop == "name" + ) and isinstance(getattr(self, prop), (list, str)): + schema["theme"][prop] = getattr(self, prop) + return schema + + @classmethod + def load(cls, path: str) -> ThemeClass: + """Load a theme from a json file. + + Parameters: + path: The filepath to read. + """ + with open(path) as fp: + return cls.from_dict(json.load(fp, object_hook=fonts.as_font)) + + @classmethod + def from_dict(cls, theme: dict[str, dict[str, str]]) -> ThemeClass: + """Create a theme instance from a dictionary representation. 
+ + Parameters: + theme: The dictionary representation of the theme. + """ + new_theme = cls() + for prop, value in theme["theme"].items(): + setattr(new_theme, prop, value) + + # For backwards compatibility, load attributes in base theme not in the loaded theme from the base theme. + base = Base() + for attr in base.__dict__: + if not attr.startswith("_") and not hasattr(new_theme, attr): + setattr(new_theme, attr, getattr(base, attr)) + + return new_theme + + def dump(self, filename: str): + """Write the theme to a json file. + + Parameters: + filename: The path to write the theme too + """ + Path(filename).write_text(json.dumps(self.to_dict(), cls=fonts.FontEncoder)) + + @classmethod + def from_hub(cls, repo_name: str, hf_token: str | None = None): + """Load a theme from the hub. + + This DOES NOT require a HuggingFace account for downloading publicly available themes. + + Parameters: + repo_name: string of the form /@. If a semantic version expression is omitted, the latest version will be fetched. + hf_token: HuggingFace Token. Only needed to download private themes. 
+ """ + if "@" not in repo_name: + name, version = repo_name, None + else: + name, version = repo_name.split("@") + + api = huggingface_hub.HfApi(token=hf_token) + + try: + space_info = api.space_info(name) + except requests.HTTPError as e: + raise ValueError(f"The space {name} does not exist") from e + + assets = get_theme_assets(space_info) + matching_version = get_matching_version(assets, version) + + if not matching_version: + raise ValueError( + f"Cannot find a matching version for expression {version} " + f"from files {[f.filename for f in assets]}" + ) + + theme_file = huggingface_hub.hf_hub_download( + repo_id=name, + repo_type="space", + filename=f"themes/theme_schema@{matching_version.version}.json", + ) + theme = cls.load(theme_file) + theme.name = name + return theme + + @staticmethod + def _get_next_version(space_info: huggingface_hub.hf_api.SpaceInfo) -> str: + assets = get_theme_assets(space_info) + latest_version = max(assets, key=lambda asset: asset.version).version + return str(latest_version.next_patch()) + + @staticmethod + def _theme_version_exists( + space_info: huggingface_hub.hf_api.SpaceInfo, version: str + ) -> bool: + assets = get_theme_assets(space_info) + return any(a.version == semver.Version(version) for a in assets) + + def push_to_hub( + self, + repo_name: str, + org_name: str | None = None, + version: str | None = None, + hf_token: str | None = None, + theme_name: str | None = None, + description: str | None = None, + private: bool = False, + ): + """Upload a theme to the HuggingFace hub. + + This requires a HuggingFace account. + + Parameters: + repo_name: The name of the repository to store the theme assets, e.g. 'my_theme' or 'sunset'. + org_name: The name of the org to save the space in. If None (the default), the username corresponding to the logged in user, or hƒ_token is used. + version: A semantic version tag for theme. 
Bumping the version tag lets you publish updates to a theme without changing the look of applications that already loaded your theme. + hf_token: API token for your HuggingFace account + theme_name: Name for the name. If None, defaults to repo_name + description: A long form description to your theme. + """ + + from gradio import __version__ + + api = huggingface_hub.HfApi() + + if not hf_token: + try: + author = huggingface_hub.whoami()["name"] + except OSError as e: + raise ValueError( + "In order to push to hub, log in via `huggingface-cli login` " + "or provide a theme_token to push_to_hub. For more information " + "see https://huggingface.co/docs/huggingface_hub/quick-start#login" + ) from e + else: + author = huggingface_hub.whoami(token=hf_token)["name"] + + space_id = f"{org_name or author}/{repo_name}" + + try: + space_info = api.space_info(space_id) + except requests.HTTPError: + space_info = None + + space_exists = space_info is not None + + # If no version, set the version to next patch release + if not version: + version = self._get_next_version(space_info) if space_exists else "0.0.1" + else: + _ = semver.Version(version) + + if space_exists and self._theme_version_exists(space_info, version): + raise ValueError( + f"The space {space_id} already has a " + f"theme with version {version}. See: themes/theme_schema@{version}.json. " + "To manually override this version, use the HuggingFace hub UI." 
+ ) + + theme_name = theme_name or repo_name + + with tempfile.NamedTemporaryFile( + mode="w", delete=False, suffix=".json" + ) as css_file: + contents = self.to_dict() + contents["version"] = version + json.dump(contents, css_file, cls=fonts.FontEncoder) + with tempfile.NamedTemporaryFile(mode="w", delete=False) as readme_file: + readme_content = README_CONTENT.format( + theme_name=theme_name, + description=description or "Add a description of this theme here!", + author=author, + gradio_version=__version__, + ) + readme_file.write(textwrap.dedent(readme_content)) + with tempfile.NamedTemporaryFile(mode="w", delete=False) as app_file: + contents = (Path(__file__).parent / "app.py").read_text() + contents = re.sub( + r"theme=gr.themes.Default\(\)", + f"theme='{space_id}'", + contents, + ) + contents = re.sub(r"{THEME}", theme_name or repo_name, contents) + contents = re.sub(r"{AUTHOR}", org_name or author, contents) + contents = re.sub(r"{SPACE_NAME}", repo_name, contents) + app_file.write(contents) + + operations = [ + CommitOperationAdd( + path_in_repo=f"themes/theme_schema@{version}.json", + path_or_fileobj=css_file.name, + ), + CommitOperationAdd( + path_in_repo="README.md", path_or_fileobj=readme_file.name + ), + CommitOperationAdd(path_in_repo="app.py", path_or_fileobj=app_file.name), + ] + + huggingface_hub.create_repo( + space_id, + repo_type="space", + space_sdk="gradio", + token=hf_token, + exist_ok=True, + private=private, + ) + + api.create_commit( + repo_id=space_id, + commit_message="Updating theme", + repo_type="space", + operations=operations, + token=hf_token, + ) + url = f"https://huggingface.co/spaces/{space_id}" + print(f"See your theme here! 
{url}") + return url + + +@document("push_to_hub", "from_hub", "load", "dump", "from_dict", "to_dict") +class Base(ThemeClass): + def __init__( + self, + *, + primary_hue: colors.Color | str = colors.blue, + secondary_hue: colors.Color | str = colors.blue, + neutral_hue: colors.Color | str = colors.gray, + text_size: sizes.Size | str = sizes.text_md, + spacing_size: sizes.Size | str = sizes.spacing_md, + radius_size: sizes.Size | str = sizes.radius_md, + font: fonts.Font + | str + | Iterable[fonts.Font | str] = ( + fonts.GoogleFont("Source Sans Pro"), + "ui-sans-serif", + "system-ui", + "sans-serif", + ), + font_mono: fonts.Font + | str + | Iterable[fonts.Font | str] = ( + fonts.GoogleFont("IBM Plex Mono"), + "ui-monospace", + "Consolas", + "monospace", + ), + ): + """ + Parameters: + primary_hue: The primary hue of the theme. Load a preset, like gradio.themes.colors.green (or just the string "green"), or pass your own gradio.themes.utils.Color object. + secondary_hue: The secondary hue of the theme. Load a preset, like gradio.themes.colors.green (or just the string "green"), or pass your own gradio.themes.utils.Color object. + neutral_hue: The neutral hue of the theme, used . Load a preset, like gradio.themes.colors.green (or just the string "green"), or pass your own gradio.themes.utils.Color object. + text_size: The size of the text. Load a preset, like gradio.themes.sizes.text_sm (or just the string "sm"), or pass your own gradio.themes.utils.Size object. + spacing_size: The size of the spacing. Load a preset, like gradio.themes.sizes.spacing_sm (or just the string "sm"), or pass your own gradio.themes.utils.Size object. + radius_size: The radius size of corners. Load a preset, like gradio.themes.sizes.radius_sm (or just the string "sm"), or pass your own gradio.themes.utils.Size object. + font: The primary font to use for the theme. Pass a string for a system font, or a gradio.themes.font.GoogleFont object to load a font from Google Fonts. 
Pass a list of fonts for fallbacks. + font_mono: The monospace font to use for the theme, applies to code. Pass a string for a system font, or a gradio.themes.font.GoogleFont object to load a font from Google Fonts. Pass a list of fonts for fallbacks. + """ + + self.name = "base" + + def expand_shortcut(shortcut, mode="color", prefix=None): + if not isinstance(shortcut, str): + return shortcut + if mode == "color": + for color in colors.Color.all: + if color.name == shortcut: + return color + raise ValueError(f"Color shortcut {shortcut} not found.") + elif mode == "size": + for size in sizes.Size.all: + if size.name == f"{prefix}_{shortcut}": + return size + raise ValueError(f"Size shortcut {shortcut} not found.") + + primary_hue = expand_shortcut(primary_hue, mode="color") + secondary_hue = expand_shortcut(secondary_hue, mode="color") + neutral_hue = expand_shortcut(neutral_hue, mode="color") + text_size = expand_shortcut(text_size, mode="size", prefix="text") + spacing_size = expand_shortcut(spacing_size, mode="size", prefix="spacing") + radius_size = expand_shortcut(radius_size, mode="size", prefix="radius") + + # Hue ranges + self.primary_50 = primary_hue.c50 + self.primary_100 = primary_hue.c100 + self.primary_200 = primary_hue.c200 + self.primary_300 = primary_hue.c300 + self.primary_400 = primary_hue.c400 + self.primary_500 = primary_hue.c500 + self.primary_600 = primary_hue.c600 + self.primary_700 = primary_hue.c700 + self.primary_800 = primary_hue.c800 + self.primary_900 = primary_hue.c900 + self.primary_950 = primary_hue.c950 + + self.secondary_50 = secondary_hue.c50 + self.secondary_100 = secondary_hue.c100 + self.secondary_200 = secondary_hue.c200 + self.secondary_300 = secondary_hue.c300 + self.secondary_400 = secondary_hue.c400 + self.secondary_500 = secondary_hue.c500 + self.secondary_600 = secondary_hue.c600 + self.secondary_700 = secondary_hue.c700 + self.secondary_800 = secondary_hue.c800 + self.secondary_900 = secondary_hue.c900 + 
self.secondary_950 = secondary_hue.c950 + + self.neutral_50 = neutral_hue.c50 + self.neutral_100 = neutral_hue.c100 + self.neutral_200 = neutral_hue.c200 + self.neutral_300 = neutral_hue.c300 + self.neutral_400 = neutral_hue.c400 + self.neutral_500 = neutral_hue.c500 + self.neutral_600 = neutral_hue.c600 + self.neutral_700 = neutral_hue.c700 + self.neutral_800 = neutral_hue.c800 + self.neutral_900 = neutral_hue.c900 + self.neutral_950 = neutral_hue.c950 + + # Spacing + self.spacing_xxs = spacing_size.xxs + self.spacing_xs = spacing_size.xs + self.spacing_sm = spacing_size.sm + self.spacing_md = spacing_size.md + self.spacing_lg = spacing_size.lg + self.spacing_xl = spacing_size.xl + self.spacing_xxl = spacing_size.xxl + + self.radius_xxs = radius_size.xxs + self.radius_xs = radius_size.xs + self.radius_sm = radius_size.sm + self.radius_md = radius_size.md + self.radius_lg = radius_size.lg + self.radius_xl = radius_size.xl + self.radius_xxl = radius_size.xxl + + self.text_xxs = text_size.xxs + self.text_xs = text_size.xs + self.text_sm = text_size.sm + self.text_md = text_size.md + self.text_lg = text_size.lg + self.text_xl = text_size.xl + self.text_xxl = text_size.xxl + + # Font + if not isinstance(font, Iterable): + font = [font] + self._font = [ + fontfam if isinstance(fontfam, fonts.Font) else fonts.Font(fontfam) + for fontfam in font + ] + if not isinstance(font_mono, Iterable): + font_mono = [font_mono] + self._font_mono = [ + fontfam if isinstance(fontfam, fonts.Font) else fonts.Font(fontfam) + for fontfam in font_mono + ] + self.font = ", ".join(str(font) for font in self._font) + self.font_mono = ", ".join(str(font) for font in self._font_mono) + + self._stylesheets = [] + for font in self._font + self._font_mono: + font_stylesheet = font.stylesheet() + if font_stylesheet: + self._stylesheets.append(font_stylesheet) + + self.set() + + def set( + self, + *, + # Body Attributes: These set set the values for the entire body of the app. 
+ body_background_fill=None, + body_background_fill_dark=None, + body_text_color=None, + body_text_color_dark=None, + body_text_size=None, + body_text_color_subdued=None, + body_text_color_subdued_dark=None, + body_text_weight=None, + embed_radius=None, + # Element Colors: These set the colors for common elements. + background_fill_primary=None, + background_fill_primary_dark=None, + background_fill_secondary=None, + background_fill_secondary_dark=None, + border_color_accent=None, + border_color_accent_dark=None, + border_color_accent_subdued=None, + border_color_accent_subdued_dark=None, + border_color_primary=None, + border_color_primary_dark=None, + color_accent=None, + color_accent_soft=None, + color_accent_soft_dark=None, + # Text: This sets the text styling for text elements. + link_text_color=None, + link_text_color_dark=None, + link_text_color_active=None, + link_text_color_active_dark=None, + link_text_color_hover=None, + link_text_color_hover_dark=None, + link_text_color_visited=None, + link_text_color_visited_dark=None, + prose_text_size=None, + prose_text_weight=None, + prose_header_text_weight=None, + # Shadows: These set the high-level shadow rendering styles. These variables are often referenced by other component-specific shadow variables. + shadow_drop=None, + shadow_drop_lg=None, + shadow_inset=None, + shadow_spread=None, + shadow_spread_dark=None, + # Layout Atoms: These set the style for common layout elements, such as the blocks that wrap components. 
+ block_background_fill=None, + block_background_fill_dark=None, + block_border_color=None, + block_border_color_dark=None, + block_border_width=None, + block_border_width_dark=None, + block_info_text_color=None, + block_info_text_color_dark=None, + block_info_text_size=None, + block_info_text_weight=None, + block_label_background_fill=None, + block_label_background_fill_dark=None, + block_label_border_color=None, + block_label_border_color_dark=None, + block_label_border_width=None, + block_label_border_width_dark=None, + block_label_shadow=None, + block_label_text_color=None, + block_label_text_color_dark=None, + block_label_margin=None, + block_label_padding=None, + block_label_radius=None, + block_label_right_radius=None, + block_label_text_size=None, + block_label_text_weight=None, + block_padding=None, + block_radius=None, + block_shadow=None, + block_shadow_dark=None, + block_title_background_fill=None, + block_title_background_fill_dark=None, + block_title_border_color=None, + block_title_border_color_dark=None, + block_title_border_width=None, + block_title_border_width_dark=None, + block_title_text_color=None, + block_title_text_color_dark=None, + block_title_padding=None, + block_title_radius=None, + block_title_text_size=None, + block_title_text_weight=None, + container_radius=None, + form_gap_width=None, + layout_gap=None, + panel_background_fill=None, + panel_background_fill_dark=None, + panel_border_color=None, + panel_border_color_dark=None, + panel_border_width=None, + panel_border_width_dark=None, + section_header_text_size=None, + section_header_text_weight=None, + # Component Atoms: These set the style for elements within components. 
+ chatbot_code_background_color=None, + chatbot_code_background_color_dark=None, + checkbox_background_color=None, + checkbox_background_color_dark=None, + checkbox_background_color_focus=None, + checkbox_background_color_focus_dark=None, + checkbox_background_color_hover=None, + checkbox_background_color_hover_dark=None, + checkbox_background_color_selected=None, + checkbox_background_color_selected_dark=None, + checkbox_border_color=None, + checkbox_border_color_dark=None, + checkbox_border_color_focus=None, + checkbox_border_color_focus_dark=None, + checkbox_border_color_hover=None, + checkbox_border_color_hover_dark=None, + checkbox_border_color_selected=None, + checkbox_border_color_selected_dark=None, + checkbox_border_radius=None, + checkbox_border_width=None, + checkbox_border_width_dark=None, + checkbox_check=None, + radio_circle=None, + checkbox_shadow=None, + checkbox_label_background_fill=None, + checkbox_label_background_fill_dark=None, + checkbox_label_background_fill_hover=None, + checkbox_label_background_fill_hover_dark=None, + checkbox_label_background_fill_selected=None, + checkbox_label_background_fill_selected_dark=None, + checkbox_label_border_color=None, + checkbox_label_border_color_dark=None, + checkbox_label_border_color_hover=None, + checkbox_label_border_color_hover_dark=None, + checkbox_label_border_width=None, + checkbox_label_border_width_dark=None, + checkbox_label_gap=None, + checkbox_label_padding=None, + checkbox_label_shadow=None, + checkbox_label_text_size=None, + checkbox_label_text_weight=None, + checkbox_label_text_color=None, + checkbox_label_text_color_dark=None, + checkbox_label_text_color_selected=None, + checkbox_label_text_color_selected_dark=None, + error_background_fill=None, + error_background_fill_dark=None, + error_border_color=None, + error_border_color_dark=None, + error_border_width=None, + error_border_width_dark=None, + error_text_color=None, + error_text_color_dark=None, + error_icon_color=None, + 
error_icon_color_dark=None, + input_background_fill=None, + input_background_fill_dark=None, + input_background_fill_focus=None, + input_background_fill_focus_dark=None, + input_background_fill_hover=None, + input_background_fill_hover_dark=None, + input_border_color=None, + input_border_color_dark=None, + input_border_color_focus=None, + input_border_color_focus_dark=None, + input_border_color_hover=None, + input_border_color_hover_dark=None, + input_border_width=None, + input_border_width_dark=None, + input_padding=None, + input_placeholder_color=None, + input_placeholder_color_dark=None, + input_radius=None, + input_shadow=None, + input_shadow_dark=None, + input_shadow_focus=None, + input_shadow_focus_dark=None, + input_text_size=None, + input_text_weight=None, + loader_color=None, + loader_color_dark=None, + slider_color=None, + slider_color_dark=None, + stat_background_fill=None, + stat_background_fill_dark=None, + table_border_color=None, + table_border_color_dark=None, + table_even_background_fill=None, + table_even_background_fill_dark=None, + table_odd_background_fill=None, + table_odd_background_fill_dark=None, + table_radius=None, + table_row_focus=None, + table_row_focus_dark=None, + # Buttons: These set the style for buttons. 
+ button_border_width=None, + button_border_width_dark=None, + button_shadow=None, + button_shadow_active=None, + button_shadow_hover=None, + button_transition=None, + button_large_padding=None, + button_large_radius=None, + button_large_text_size=None, + button_large_text_weight=None, + button_small_padding=None, + button_small_radius=None, + button_small_text_size=None, + button_small_text_weight=None, + button_primary_background_fill=None, + button_primary_background_fill_dark=None, + button_primary_background_fill_hover=None, + button_primary_background_fill_hover_dark=None, + button_primary_border_color=None, + button_primary_border_color_dark=None, + button_primary_border_color_hover=None, + button_primary_border_color_hover_dark=None, + button_primary_text_color=None, + button_primary_text_color_dark=None, + button_primary_text_color_hover=None, + button_primary_text_color_hover_dark=None, + button_secondary_background_fill=None, + button_secondary_background_fill_dark=None, + button_secondary_background_fill_hover=None, + button_secondary_background_fill_hover_dark=None, + button_secondary_border_color=None, + button_secondary_border_color_dark=None, + button_secondary_border_color_hover=None, + button_secondary_border_color_hover_dark=None, + button_secondary_text_color=None, + button_secondary_text_color_dark=None, + button_secondary_text_color_hover=None, + button_secondary_text_color_hover_dark=None, + button_cancel_background_fill=None, + button_cancel_background_fill_dark=None, + button_cancel_background_fill_hover=None, + button_cancel_background_fill_hover_dark=None, + button_cancel_border_color=None, + button_cancel_border_color_dark=None, + button_cancel_border_color_hover=None, + button_cancel_border_color_hover_dark=None, + button_cancel_text_color=None, + button_cancel_text_color_dark=None, + button_cancel_text_color_hover=None, + button_cancel_text_color_hover_dark=None, + ) -> Base: + """ + Parameters: + body_background_fill: The background 
of the entire app. + body_background_fill_dark: The background of the entire app in dark mode. + body_text_color: The default text color. + body_text_color_dark: The default text color in dark mode. + body_text_size: The default text size. + body_text_color_subdued: The text color used for softer, less important text. + body_text_color_subdued_dark: The text color used for softer, less important text in dark mode. + body_text_weight: The default text weight. + embed_radius: The corner radius used for embedding when the app is embedded within a page. + background_fill_primary: The background primarily used for items placed directly on the page. + background_fill_primary_dark: The background primarily used for items placed directly on the page in dark mode. + background_fill_secondary: The background primarily used for items placed on top of another item. + background_fill_secondary_dark: The background primarily used for items placed on top of another item in dark mode. + border_color_accent: The border color used for accented items. + border_color_accent_dark: The border color used for accented items in dark mode. + border_color_accent_subdued: The subdued border color for accented items. + border_color_accent_subdued_dark: The subdued border color for accented items in dark mode. + border_color_primary: The border color primarily used for items placed directly on the page. + border_color_primary_dark: The border color primarily used for items placed directly on the page in dark mode. + color_accent: The color used for accented items. + color_accent_soft: The softer color used for accented items. + color_accent_soft_dark: The softer color used for accented items in dark mode. + link_text_color: The text color used for links. + link_text_color_dark: The text color used for links in dark mode. + link_text_color_active: The text color used for links when they are active. + link_text_color_active_dark: The text color used for links when they are active in dark mode. 
+ link_text_color_hover: The text color used for links when they are hovered over. + link_text_color_hover_dark: The text color used for links when they are hovered over in dark mode. + link_text_color_visited: The text color used for links when they have been visited. + link_text_color_visited_dark: The text color used for links when they have been visited in dark mode. + prose_text_size: The text size used for markdown and other prose. + prose_text_weight: The text weight used for markdown and other prose. + prose_header_text_weight: The text weight of a header used for markdown and other prose. + shadow_drop: Drop shadow used by other shadowed items. + shadow_drop_lg: Larger drop shadow used by other shadowed items. + shadow_inset: Inset shadow used by other shadowed items. + shadow_spread: Size of shadow spread used by shadowed items. + shadow_spread_dark: Size of shadow spread used by shadowed items in dark mode. + block_background_fill: The background around an item. + block_background_fill_dark: The background around an item in dark mode. + block_border_color: The border color around an item. + block_border_color_dark: The border color around an item in dark mode. + block_border_width: The border width around an item. + block_border_width_dark: The border width around an item in dark mode. + block_info_text_color: The color of the info text. + block_info_text_color_dark: The color of the info text in dark mode. + block_info_text_size: The size of the info text. + block_info_text_weight: The weight of the info text. + block_label_background_fill: The background of the title label of a media element (e.g. image). + block_label_background_fill_dark: The background of the title label of a media element (e.g. image) in dark mode. + block_label_border_color: The border color of the title label of a media element (e.g. image). + block_label_border_color_dark: The border color of the title label of a media element (e.g. image) in dark mode. 
+ block_label_border_width: The border width of the title label of a media element (e.g. image). + block_label_border_width_dark: The border width of the title label of a media element (e.g. image) in dark mode. + block_label_shadow: The shadow of the title label of a media element (e.g. image). + block_label_text_color: The text color of the title label of a media element (e.g. image). + block_label_text_color_dark: The text color of the title label of a media element (e.g. image) in dark mode. + block_label_margin: The margin of the title label of a media element (e.g. image) from its surrounding container. + block_label_padding: The padding of the title label of a media element (e.g. image). + block_label_radius: The corner radius of the title label of a media element (e.g. image). + block_label_right_radius: The corner radius of a right-aligned helper label. + block_label_text_size: The text size of the title label of a media element (e.g. image). + block_label_text_weight: The text weight of the title label of a media element (e.g. image). + block_padding: The padding around an item. + block_radius: The corner radius around an item. + block_shadow: The shadow under an item. + block_shadow_dark: The shadow under an item in dark mode. + block_title_background_fill: The background of the title of a form element (e.g. textbox). + block_title_background_fill_dark: The background of the title of a form element (e.g. textbox) in dark mode. + block_title_border_color: The border color of the title of a form element (e.g. textbox). + block_title_border_color_dark: The border color of the title of a form element (e.g. textbox) in dark mode. + block_title_border_width: The border width of the title of a form element (e.g. textbox). + block_title_border_width_dark: The border width of the title of a form element (e.g. textbox) in dark mode. + block_title_text_color: The text color of the title of a form element (e.g. textbox). 
+ block_title_text_color_dark: The text color of the title of a form element (e.g. textbox) in dark mode. + block_title_padding: The padding of the title of a form element (e.g. textbox). + block_title_radius: The corner radius of the title of a form element (e.g. textbox). + block_title_text_size: The text size of the title of a form element (e.g. textbox). + block_title_text_weight: The text weight of the title of a form element (e.g. textbox). + container_radius: The corner radius of a layout component that holds other content. + form_gap_width: The border gap between form elements, (e.g. consecutive textboxes). + layout_gap: The gap between items within a row or column. + panel_background_fill: The background of a panel. + panel_background_fill_dark: The background of a panel in dark mode. + panel_border_color: The border color of a panel. + panel_border_color_dark: The border color of a panel in dark mode. + panel_border_width: The border width of a panel. + panel_border_width_dark: The border width of a panel in dark mode. + section_header_text_size: The text size of a section header (e.g. tab name). + section_header_text_weight: The text weight of a section header (e.g. tab name). + chatbot_code_background_color: The background color of code blocks in the chatbot. + chatbot_code_background_color_dark: The background color of code blocks in the chatbot in dark mode. + checkbox_background_color: The background of a checkbox square or radio circle. + checkbox_background_color_dark: The background of a checkbox square or radio circle in dark mode. + checkbox_background_color_focus: The background of a checkbox square or radio circle when focused. + checkbox_background_color_focus_dark: The background of a checkbox square or radio circle when focused in dark mode. + checkbox_background_color_hover: The background of a checkbox square or radio circle when hovered over. 
+ checkbox_background_color_hover_dark: The background of a checkbox square or radio circle when hovered over in dark mode. + checkbox_background_color_selected: The background of a checkbox square or radio circle when selected. + checkbox_background_color_selected_dark: The background of a checkbox square or radio circle when selected in dark mode. + checkbox_border_color: The border color of a checkbox square or radio circle. + checkbox_border_color_dark: The border color of a checkbox square or radio circle in dark mode. + checkbox_border_color_focus: The border color of a checkbox square or radio circle when focused. + checkbox_border_color_focus_dark: The border color of a checkbox square or radio circle when focused in dark mode. + checkbox_border_color_hover: The border color of a checkbox square or radio circle when hovered over. + checkbox_border_color_hover_dark: The border color of a checkbox square or radio circle when hovered over in dark mode. + checkbox_border_color_selected: The border color of a checkbox square or radio circle when selected. + checkbox_border_color_selected_dark: The border color of a checkbox square or radio circle when selected in dark mode. + checkbox_border_radius: The corner radius of a checkbox square. + checkbox_border_width: The border width of a checkbox square or radio circle. + checkbox_border_width_dark: The border width of a checkbox square or radio circle in dark mode. + checkbox_check: The checkmark visual of a checkbox square. + radio_circle: The circle visual of a radio circle. + checkbox_shadow: The shadow of a checkbox square or radio circle. + checkbox_label_background_fill: The background of the surrounding button of a checkbox or radio element. + checkbox_label_background_fill_dark: The background of the surrounding button of a checkbox or radio element in dark mode. + checkbox_label_background_fill_hover: The background of the surrounding button of a checkbox or radio element when hovered over. 
+ checkbox_label_background_fill_hover_dark: The background of the surrounding button of a checkbox or radio element when hovered over in dark mode. + checkbox_label_background_fill_selected: The background of the surrounding button of a checkbox or radio element when selected. + checkbox_label_background_fill_selected_dark: The background of the surrounding button of a checkbox or radio element when selected in dark mode. + checkbox_label_border_color: The border color of the surrounding button of a checkbox or radio element. + checkbox_label_border_color_dark: The border color of the surrounding button of a checkbox or radio element in dark mode. + checkbox_label_border_color_hover: The border color of the surrounding button of a checkbox or radio element when hovered over. + checkbox_label_border_color_hover_dark: The border color of the surrounding button of a checkbox or radio element when hovered over in dark mode. + checkbox_label_border_width: The border width of the surrounding button of a checkbox or radio element. + checkbox_label_border_width_dark: The border width of the surrounding button of a checkbox or radio element in dark mode. + checkbox_label_gap: The gap consecutive checkbox or radio elements. + checkbox_label_padding: The padding of the surrounding button of a checkbox or radio element. + checkbox_label_shadow: The shadow of the surrounding button of a checkbox or radio element. + checkbox_label_text_size: The text size of the label accompanying a checkbox or radio element. + checkbox_label_text_weight: The text weight of the label accompanying a checkbox or radio element. + checkbox_label_text_color: The text color of the label accompanying a checkbox or radio element. + checkbox_label_text_color_dark: The text color of the label accompanying a checkbox or radio element in dark mode. + checkbox_label_text_color_selected: The text color of the label accompanying a checkbox or radio element when selected. 
+ checkbox_label_text_color_selected_dark: The text color of the label accompanying a checkbox or radio element when selected in dark mode. + error_background_fill: The background of an error message. + error_background_fill_dark: The background of an error message in dark mode. + error_border_color: The border color of an error message. + error_border_color_dark: The border color of an error message in dark mode. + error_border_width: The border width of an error message. + error_border_width_dark: The border width of an error message in dark mode. + error_text_color: The text color of an error message. + error_text_color_dark: The text color of an error message in dark mode. + input_background_fill: The background of an input field. + input_background_fill_dark: The background of an input field in dark mode. + input_background_fill_focus: The background of an input field when focused. + input_background_fill_focus_dark: The background of an input field when focused in dark mode. + input_background_fill_hover: The background of an input field when hovered over. + input_background_fill_hover_dark: The background of an input field when hovered over in dark mode. + input_border_color: The border color of an input field. + input_border_color_dark: The border color of an input field in dark mode. + input_border_color_focus: The border color of an input field when focused. + input_border_color_focus_dark: The border color of an input field when focused in dark mode. + input_border_color_hover: The border color of an input field when hovered over. + input_border_color_hover_dark: The border color of an input field when hovered over in dark mode. + input_border_width: The border width of an input field. + input_border_width_dark: The border width of an input field in dark mode. + input_padding: The padding of an input field. + input_placeholder_color: The placeholder text color of an input field. 
+ input_placeholder_color_dark: The placeholder text color of an input field in dark mode. + input_radius: The corner radius of an input field. + input_shadow: The shadow of an input field. + input_shadow_dark: The shadow of an input field in dark mode. + input_shadow_focus: The shadow of an input field when focused. + input_shadow_focus_dark: The shadow of an input field when focused in dark mode. + input_text_size: The text size of an input field. + input_text_weight: The text weight of an input field. + loader_color: The color of the loading animation while a request is pending. + loader_color_dark: The color of the loading animation while a request is pending in dark mode. + slider_color: The color of the slider in a range element. + slider_color_dark: The color of the slider in a range element in dark mode. + stat_background_fill: The background used for stats visuals (e.g. confidence bars in label). + stat_background_fill_dark: The background used for stats visuals (e.g. confidence bars in label) in dark mode. + table_border_color: The border color of a table. + table_border_color_dark: The border color of a table in dark mode. + table_even_background_fill: The background of even rows in a table. + table_even_background_fill_dark: The background of even rows in a table in dark mode. + table_odd_background_fill: The background of odd rows in a table. + table_odd_background_fill_dark: The background of odd rows in a table in dark mode. + table_radius: The corner radius of a table. + table_row_focus: The background of a focused row in a table. + table_row_focus_dark: The background of a focused row in a table in dark mode. + button_border_width: The border width of a button. + button_border_width_dark: The border width of a button in dark mode. + button_cancel_background_fill: The background of a button of "cancel" variant. + button_cancel_background_fill_dark: The background of a button of "cancel" variant in dark mode. 
+ button_cancel_background_fill_hover: The background of a button of "cancel" variant when hovered over. + button_cancel_background_fill_hover_dark: The background of a button of "cancel" variant when hovered over in dark mode. + button_cancel_border_color: The border color of a button of "cancel" variant. + button_cancel_border_color_dark: The border color of a button of "cancel" variant in dark mode. + button_cancel_border_color_hover: The border color of a button of "cancel" variant when hovered over. + button_cancel_border_color_hover_dark: The border color of a button of "cancel" variant when hovered over in dark mode. + button_cancel_text_color: The text color of a button of "cancel" variant. + button_cancel_text_color_dark: The text color of a button of "cancel" variant in dark mode. + button_cancel_text_color_hover: The text color of a button of "cancel" variant when hovered over. + button_cancel_text_color_hover_dark: The text color of a button of "cancel" variant when hovered over in dark mode. + button_large_padding: The padding of a button with the default "large" size. + button_large_radius: The corner radius of a button with the default "large" size. + button_large_text_size: The text size of a button with the default "large" size. + button_large_text_weight: The text weight of a button with the default "large" size. + button_primary_background_fill: The background of a button of "primary" variant. + button_primary_background_fill_dark: The background of a button of "primary" variant in dark mode. + button_primary_background_fill_hover: The background of a button of "primary" variant when hovered over. + button_primary_background_fill_hover_dark: The background of a button of "primary" variant when hovered over in dark mode. + button_primary_border_color: The border color of a button of "primary" variant. + button_primary_border_color_dark: The border color of a button of "primary" variant in dark mode. 
+ button_primary_border_color_hover: The border color of a button of "primary" variant when hovered over. + button_primary_border_color_hover_dark: The border color of a button of "primary" variant when hovered over in dark mode. + button_primary_text_color: The text color of a button of "primary" variant. + button_primary_text_color_dark: The text color of a button of "primary" variant in dark mode. + button_primary_text_color_hover: The text color of a button of "primary" variant when hovered over. + button_primary_text_color_hover_dark: The text color of a button of "primary" variant when hovered over in dark mode. + button_secondary_background_fill: The background of a button of default "secondary" variant. + button_secondary_background_fill_dark: The background of a button of default "secondary" variant in dark mode. + button_secondary_background_fill_hover: The background of a button of default "secondary" variant when hovered over. + button_secondary_background_fill_hover_dark: The background of a button of default "secondary" variant when hovered over in dark mode. + button_secondary_border_color: The border color of a button of default "secondary" variant. + button_secondary_border_color_dark: The border color of a button of default "secondary" variant in dark mode. + button_secondary_border_color_hover: The border color of a button of default "secondary" variant when hovered over. + button_secondary_border_color_hover_dark: The border color of a button of default "secondary" variant when hovered over in dark mode. + button_secondary_text_color: The text color of a button of default "secondary" variant. + button_secondary_text_color_dark: The text color of a button of default "secondary" variant in dark mode. + button_secondary_text_color_hover: The text color of a button of default "secondary" variant when hovered over. + button_secondary_text_color_hover_dark: The text color of a button of default "secondary" variant when hovered over in dark mode. 
+ button_shadow: The shadow under a button. + button_shadow_active: The shadow under a button when pressed. + button_shadow_hover: The shadow under a button when hovered over. + button_small_padding: The padding of a button set to "small" size. + button_small_radius: The corner radius of a button set to "small" size. + button_small_text_size: The text size of a button set to "small" size. + button_small_text_weight: The text weight of a button set to "small" size. + button_transition: The transition animation duration of a button between regular, hover, and focused states. + """ + + # Body + self.body_background_fill = body_background_fill or getattr( + self, "body_background_fill", "*background_fill_primary" + ) + self.body_background_fill_dark = body_background_fill_dark or getattr( + self, "body_background_fill_dark", "*background_fill_primary" + ) + self.body_text_color = body_text_color or getattr( + self, "body_text_color", "*neutral_800" + ) + self.body_text_color_dark = body_text_color_dark or getattr( + self, "body_text_color_dark", "*neutral_100" + ) + self.body_text_size = body_text_size or getattr( + self, "body_text_size", "*text_md" + ) + self.body_text_weight = body_text_weight or getattr( + self, "body_text_weight", "400" + ) + self.embed_radius = embed_radius or getattr(self, "embed_radius", "*radius_lg") + # Core Colors + self.color_accent = color_accent or getattr( + self, "color_accent", "*primary_500" + ) + self.color_accent_soft = color_accent_soft or getattr( + self, "color_accent_soft", "*primary_50" + ) + self.color_accent_soft_dark = color_accent_soft_dark or getattr( + self, "color_accent_soft_dark", "*neutral_700" + ) + self.background_fill_primary = background_fill_primary or getattr( + self, "background_primary", "white" + ) + self.background_fill_primary_dark = background_fill_primary_dark or getattr( + self, "background_primary_dark", "*neutral_950" + ) + self.background_fill_secondary = background_fill_secondary or getattr( + self, 
"background_secondary", "*neutral_50" + ) + self.background_fill_secondary_dark = background_fill_secondary_dark or getattr( + self, "background_secondary_dark", "*neutral_900" + ) + self.border_color_accent = border_color_accent or getattr( + self, "border_color_accent", "*primary_300" + ) + self.border_color_accent_dark = border_color_accent_dark or getattr( + self, "border_color_accent_dark", "*neutral_600" + ) + self.border_color_primary = border_color_primary or getattr( + self, "border_color_primary", "*neutral_200" + ) + self.border_color_primary_dark = border_color_primary_dark or getattr( + self, "border_color_primary_dark", "*neutral_700" + ) + # Text Colors + self.link_text_color = link_text_color or getattr( + self, "link_text_color", "*secondary_600" + ) + self.link_text_color_active = link_text_color_active or getattr( + self, "link_text_color_active", "*secondary_600" + ) + self.link_text_color_active_dark = link_text_color_active_dark or getattr( + self, "link_text_color_active_dark", "*secondary_500" + ) + self.link_text_color_dark = link_text_color_dark or getattr( + self, "link_text_color_dark", "*secondary_500" + ) + self.link_text_color_hover = link_text_color_hover or getattr( + self, "link_text_color_hover", "*secondary_700" + ) + self.link_text_color_hover_dark = link_text_color_hover_dark or getattr( + self, "link_text_color_hover_dark", "*secondary_400" + ) + self.link_text_color_visited = link_text_color_visited or getattr( + self, "link_text_color_visited", "*secondary_500" + ) + self.link_text_color_visited_dark = link_text_color_visited_dark or getattr( + self, "link_text_color_visited_dark", "*secondary_600" + ) + self.body_text_color_subdued = body_text_color_subdued or getattr( + self, "body_text_color_subdued", "*neutral_400" + ) + self.body_text_color_subdued_dark = body_text_color_subdued_dark or getattr( + self, "body_text_color_subdued_dark", "*neutral_400" + ) + # Shadows + self.shadow_drop = shadow_drop or getattr( + self, 
"shadow_drop", "rgba(0,0,0,0.05) 0px 1px 2px 0px" + ) + self.shadow_drop_lg = shadow_drop_lg or getattr( + self, + "shadow_drop_lg", + "0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1)", + ) + self.shadow_inset = shadow_inset or getattr( + self, "shadow_inset", "rgba(0,0,0,0.05) 0px 2px 4px 0px inset" + ) + self.shadow_spread = shadow_spread or getattr(self, "shadow_spread", "3px") + self.shadow_spread_dark = shadow_spread_dark or getattr( + self, "shadow_spread_dark", "1px" + ) + # Layout Atoms + self.block_background_fill = block_background_fill or getattr( + self, "block_background_fill", "*background_fill_primary" + ) + self.block_background_fill_dark = block_background_fill_dark or getattr( + self, "block_background_fill_dark", "*neutral_800" + ) + self.block_border_color = block_border_color or getattr( + self, "block_border_color", "*border_color_primary" + ) + self.block_border_color_dark = block_border_color_dark or getattr( + self, "block_border_color_dark", "*border_color_primary" + ) + self.block_border_width = block_border_width or getattr( + self, "block_border_width", "1px" + ) + self.block_border_width_dark = block_border_width_dark or getattr( + self, "block_border_width_dark", None + ) + self.block_info_text_color = block_info_text_color or getattr( + self, "block_info_text_color", "*body_text_color_subdued" + ) + self.block_info_text_color_dark = block_info_text_color_dark or getattr( + self, "block_info_text_color_dark", "*body_text_color_subdued" + ) + self.block_info_text_size = block_info_text_size or getattr( + self, "block_info_text_size", "*text_sm" + ) + self.block_info_text_weight = block_info_text_weight or getattr( + self, "block_info_text_weight", "400" + ) + self.block_label_background_fill = block_label_background_fill or getattr( + self, "block_label_background_fill", "*background_fill_primary" + ) + self.block_label_background_fill_dark = ( + block_label_background_fill_dark + or getattr( + self, 
"block_label_background_fill_dark", "*background_fill_secondary" + ) + ) + self.block_label_border_color = block_label_border_color or getattr( + self, "block_label_border_color", "*border_color_primary" + ) + self.block_label_border_color_dark = block_label_border_color_dark or getattr( + self, "block_label_border_color_dark", "*border_color_primary" + ) + self.block_label_border_width = block_label_border_width or getattr( + self, "block_label_border_width", "1px" + ) + self.block_label_border_width_dark = block_label_border_width_dark or getattr( + self, "block_label_border_width_dark", None + ) + self.block_label_shadow = block_label_shadow or getattr( + self, "block_label_shadow", "*block_shadow" + ) + self.block_label_text_color = block_label_text_color or getattr( + self, "block_label_text_color", "*neutral_500" + ) + self.block_label_text_color_dark = block_label_text_color_dark or getattr( + self, "block_label_text_color_dark", "*neutral_200" + ) + self.block_label_margin = block_label_margin or getattr( + self, "block_label_margin", "0" + ) + self.block_label_padding = block_label_padding or getattr( + self, "block_label_padding", "*spacing_sm *spacing_lg" + ) + self.block_label_radius = block_label_radius or getattr( + self, + "block_label_radius", + "calc(*radius_lg - 1px) 0 calc(*radius_lg - 1px) 0", + ) + self.block_label_right_radius = block_label_right_radius or getattr( + self, + "block_label_right_radius", + "0 calc(*radius_lg - 1px) 0 calc(*radius_lg - 1px)", + ) + self.block_label_text_size = block_label_text_size or getattr( + self, "block_label_text_size", "*text_sm" + ) + self.block_label_text_weight = block_label_text_weight or getattr( + self, "block_label_text_weight", "400" + ) + self.block_padding = block_padding or getattr( + self, "block_padding", "*spacing_xl calc(*spacing_xl + 2px)" + ) + self.block_radius = block_radius or getattr(self, "block_radius", "*radius_lg") + self.block_shadow = block_shadow or getattr(self, "block_shadow", 
"none") + self.block_shadow_dark = block_shadow_dark or getattr( + self, "block_shadow_dark", None + ) + self.block_title_background_fill = block_title_background_fill or getattr( + self, "block_title_background_fill", "none" + ) + self.block_title_background_fill_dark = ( + block_title_background_fill_dark + or getattr(self, "block_title_background_fill_dark", None) + ) + self.block_title_border_color = block_title_border_color or getattr( + self, "block_title_border_color", "none" + ) + self.block_title_border_color_dark = block_title_border_color_dark or getattr( + self, "block_title_border_color_dark", None + ) + self.block_title_border_width = block_title_border_width or getattr( + self, "block_title_border_width", "0px" + ) + self.block_title_border_width_dark = block_title_border_width_dark or getattr( + self, "block_title_border_width_dark", None + ) + self.block_title_text_color = block_title_text_color or getattr( + self, "block_title_text_color", "*neutral_500" + ) + self.block_title_text_color_dark = block_title_text_color_dark or getattr( + self, "block_title_text_color_dark", "*neutral_200" + ) + self.block_title_padding = block_title_padding or getattr( + self, "block_title_padding", "0" + ) + self.block_title_radius = block_title_radius or getattr( + self, "block_title_radius", "none" + ) + self.block_title_text_size = block_title_text_size or getattr( + self, "block_title_text_size", "*text_md" + ) + self.block_title_text_weight = block_title_text_weight or getattr( + self, "block_title_text_weight", "400" + ) + self.container_radius = container_radius or getattr( + self, "container_radius", "*radius_lg" + ) + self.form_gap_width = form_gap_width or getattr(self, "form_gap_width", "0px") + self.layout_gap = layout_gap or getattr(self, "layout_gap", "*spacing_xxl") + self.panel_background_fill = panel_background_fill or getattr( + self, "panel_background_fill", "*background_fill_secondary" + ) + self.panel_background_fill_dark = 
panel_background_fill_dark or getattr( + self, "panel_background_fill_dark", "*background_fill_secondary" + ) + self.panel_border_color = panel_border_color or getattr( + self, "panel_border_color", "*border_color_primary" + ) + self.panel_border_color_dark = panel_border_color_dark or getattr( + self, "panel_border_color_dark", "*border_color_primary" + ) + self.panel_border_width = panel_border_width or getattr( + self, "panel_border_width", "0" + ) + self.panel_border_width_dark = panel_border_width_dark or getattr( + self, "panel_border_width_dark", None + ) + self.section_header_text_size = section_header_text_size or getattr( + self, "section_header_text_size", "*text_md" + ) + self.section_header_text_weight = section_header_text_weight or getattr( + self, "section_header_text_weight", "400" + ) + self.border_color_accent_subdued = border_color_accent_subdued or getattr( + self, "border_color_accent_subdued", "*border_color_accent" + ) + self.border_color_accent_subdued_dark = ( + border_color_accent_subdued_dark + or getattr(self, "border_color_accent_subdued_dark", "*border_color_accent") + ) + # Component Atoms + self.chatbot_code_background_color = chatbot_code_background_color or getattr( + self, "chatbot_code_background_color", "*neutral_100" + ) + self.chatbot_code_background_color_dark = ( + chatbot_code_background_color_dark + or getattr(self, "chatbot_code_background_color_dark", "*neutral_800") + ) + self.checkbox_background_color = checkbox_background_color or getattr( + self, "checkbox_background_color", "*background_fill_primary" + ) + self.checkbox_background_color_dark = checkbox_background_color_dark or getattr( + self, "checkbox_background_color_dark", "*neutral_800" + ) + self.checkbox_background_color_focus = ( + checkbox_background_color_focus + or getattr( + self, "checkbox_background_color_focus", "*checkbox_background_color" + ) + ) + self.checkbox_background_color_focus_dark = ( + checkbox_background_color_focus_dark + or getattr( + 
self, + "checkbox_background_color_focus_dark", + "*checkbox_background_color", + ) + ) + self.checkbox_background_color_hover = ( + checkbox_background_color_hover + or getattr( + self, "checkbox_background_color_hover", "*checkbox_background_color" + ) + ) + self.checkbox_background_color_hover_dark = ( + checkbox_background_color_hover_dark + or getattr( + self, + "checkbox_background_color_hover_dark", + "*checkbox_background_color", + ) + ) + self.checkbox_background_color_selected = ( + checkbox_background_color_selected + or getattr(self, "checkbox_background_color_selected", "*secondary_600") + ) + self.checkbox_background_color_selected_dark = ( + checkbox_background_color_selected_dark + or getattr( + self, "checkbox_background_color_selected_dark", "*secondary_600" + ) + ) + self.checkbox_border_color = checkbox_border_color or getattr( + self, "checkbox_border_color", "*neutral_300" + ) + self.checkbox_border_color_dark = checkbox_border_color_dark or getattr( + self, "checkbox_border_color_dark", "*neutral_700" + ) + self.checkbox_border_color_focus = checkbox_border_color_focus or getattr( + self, "checkbox_border_color_focus", "*secondary_500" + ) + self.checkbox_border_color_focus_dark = ( + checkbox_border_color_focus_dark + or getattr(self, "checkbox_border_color_focus_dark", "*secondary_500") + ) + self.checkbox_border_color_hover = checkbox_border_color_hover or getattr( + self, "checkbox_border_color_hover", "*neutral_300" + ) + self.checkbox_border_color_hover_dark = ( + checkbox_border_color_hover_dark + or getattr(self, "checkbox_border_color_hover_dark", "*neutral_600") + ) + self.checkbox_border_color_selected = checkbox_border_color_selected or getattr( + self, "checkbox_border_color_selected", "*secondary_600" + ) + self.checkbox_border_color_selected_dark = ( + checkbox_border_color_selected_dark + or getattr(self, "checkbox_border_color_selected_dark", "*secondary_600") + ) + self.checkbox_border_radius = checkbox_border_radius or 
getattr( + self, "checkbox_border_radius", "*radius_sm" + ) + self.checkbox_border_width = checkbox_border_width or getattr( + self, "checkbox_border_width", "*input_border_width" + ) + self.checkbox_border_width_dark = checkbox_border_width_dark or getattr( + self, "checkbox_border_width_dark", "*input_border_width" + ) + self.checkbox_label_background_fill = checkbox_label_background_fill or getattr( + self, "checkbox_label_background_fill", "*button_secondary_background_fill" + ) + self.checkbox_label_background_fill_dark = ( + checkbox_label_background_fill_dark + or getattr( + self, + "checkbox_label_background_fill_dark", + "*button_secondary_background_fill", + ) + ) + self.checkbox_label_background_fill_hover = ( + checkbox_label_background_fill_hover + or getattr( + self, + "checkbox_label_background_fill_hover", + "*button_secondary_background_fill_hover", + ) + ) + self.checkbox_label_background_fill_hover_dark = ( + checkbox_label_background_fill_hover_dark + or getattr( + self, + "checkbox_label_background_fill_hover_dark", + "*button_secondary_background_fill_hover", + ) + ) + self.checkbox_label_background_fill_selected = ( + checkbox_label_background_fill_selected + or getattr( + self, + "checkbox_label_background_fill_selected", + "*checkbox_label_background_fill", + ) + ) + self.checkbox_label_background_fill_selected_dark = ( + checkbox_label_background_fill_selected_dark + or getattr( + self, + "checkbox_label_background_fill_selected_dark", + "*checkbox_label_background_fill", + ) + ) + self.checkbox_label_border_color = checkbox_label_border_color or getattr( + self, "checkbox_label_border_color", "*border_color_primary" + ) + self.checkbox_label_border_color_dark = ( + checkbox_label_border_color_dark + or getattr( + self, "checkbox_label_border_color_dark", "*border_color_primary" + ) + ) + self.checkbox_label_border_color_hover = ( + checkbox_label_border_color_hover + or getattr( + self, + "checkbox_label_border_color_hover", + 
"*checkbox_label_border_color", + ) + ) + self.checkbox_label_border_color_hover_dark = ( + checkbox_label_border_color_hover_dark + or getattr( + self, + "checkbox_label_border_color_hover_dark", + "*checkbox_label_border_color", + ) + ) + self.checkbox_label_border_width = checkbox_label_border_width or getattr( + self, "checkbox_label_border_width", "*input_border_width" + ) + self.checkbox_label_border_width_dark = ( + checkbox_label_border_width_dark + or getattr(self, "checkbox_label_border_width_dark", "*input_border_width") + ) + self.checkbox_label_gap = checkbox_label_gap or getattr( + self, "checkbox_label_gap", "*spacing_lg" + ) + self.checkbox_label_padding = checkbox_label_padding or getattr( + self, "checkbox_label_padding", "*spacing_md calc(2 * *spacing_md)" + ) + self.checkbox_label_shadow = checkbox_label_shadow or getattr( + self, "checkbox_label_shadow", "none" + ) + self.checkbox_label_text_size = checkbox_label_text_size or getattr( + self, "checkbox_label_text_size", "*text_md" + ) + self.checkbox_label_text_weight = checkbox_label_text_weight or getattr( + self, "checkbox_label_text_weight", "400" + ) + self.checkbox_check = checkbox_check or getattr( + self, + "checkbox_check", + """url("data:image/svg+xml,%3csvg viewBox='0 0 16 16' fill='white' xmlns='http://www.w3.org/2000/svg'%3e%3cpath d='M12.207 4.793a1 1 0 010 1.414l-5 5a1 1 0 01-1.414 0l-2-2a1 1 0 011.414-1.414L6.5 9.086l4.293-4.293a1 1 0 011.414 0z'/%3e%3c/svg%3e")""", + ) + self.radio_circle = radio_circle or getattr( + self, + "radio_circle", + """url("data:image/svg+xml,%3csvg viewBox='0 0 16 16' fill='white' xmlns='http://www.w3.org/2000/svg'%3e%3ccircle cx='8' cy='8' r='3'/%3e%3c/svg%3e")""", + ) + self.checkbox_shadow = checkbox_shadow or getattr( + self, "checkbox_shadow", "*input_shadow" + ) + self.checkbox_label_text_color = checkbox_label_text_color or getattr( + self, "checkbox_label_text_color", "*body_text_color" + ) + self.checkbox_label_text_color_dark = 
checkbox_label_text_color_dark or getattr( + self, "checkbox_label_text_color_dark", "*body_text_color" + ) + self.checkbox_label_text_color_selected = ( + checkbox_label_text_color_selected + or getattr( + self, "checkbox_label_text_color_selected", "*checkbox_label_text_color" + ) + ) + self.checkbox_label_text_color_selected_dark = ( + checkbox_label_text_color_selected_dark + or getattr( + self, + "checkbox_label_text_color_selected_dark", + "*checkbox_label_text_color", + ) + ) + self.error_background_fill = error_background_fill or getattr( + self, "error_background_fill", colors.red.c50 + ) + self.error_background_fill_dark = error_background_fill_dark or getattr( + self, "error_background_fill_dark", "*background_fill_primary" + ) + self.error_border_color = error_border_color or getattr( + self, "error_border_color", colors.red.c700 + ) + self.error_border_color_dark = error_border_color_dark or getattr( + self, "error_border_color_dark", colors.red.c500 + ) + self.error_border_width = error_border_width or getattr( + self, "error_border_width", "1px" + ) + self.error_border_width_dark = error_border_width_dark or getattr( + self, "error_border_width_dark", None + ) + self.error_text_color = error_text_color or getattr( + self, "error_text_color", colors.red.c700 + ) + self.error_text_color_dark = error_text_color_dark or getattr( + self, "error_text_color_dark", colors.red.c50 + ) + self.error_icon_color = error_icon_color or getattr( + self, "error_icon_color", colors.red.c700 + ) + self.error_icon_color_dark = error_icon_color_dark or getattr( + self, "error_icon_color_dark", colors.red.c500 + ) + self.input_background_fill = input_background_fill or getattr( + self, "input_background_fill", "*neutral_100" + ) + self.input_background_fill_dark = input_background_fill_dark or getattr( + self, "input_background_fill_dark", "*neutral_700" + ) + self.input_background_fill_focus = input_background_fill_focus or getattr( + self, "input_background_fill_focus", 
"*secondary_500" + ) + self.input_background_fill_focus_dark = ( + input_background_fill_focus_dark + or getattr(self, "input_background_fill_focus_dark", "*secondary_600") + ) + self.input_background_fill_hover = input_background_fill_hover or getattr( + self, "input_background_fill_hover", "*input_background_fill" + ) + self.input_background_fill_hover_dark = ( + input_background_fill_hover_dark + or getattr( + self, "input_background_fill_hover_dark", "*input_background_fill" + ) + ) + self.input_border_color = input_border_color or getattr( + self, "input_border_color", "*border_color_primary" + ) + self.input_border_color_dark = input_border_color_dark or getattr( + self, "input_border_color_dark", "*border_color_primary" + ) + self.input_border_color_focus = input_border_color_focus or getattr( + self, "input_border_color_focus", "*secondary_300" + ) + self.input_border_color_focus_dark = input_border_color_focus_dark or getattr( + self, "input_border_color_focus_dark", "*neutral_700" + ) + self.input_border_color_hover = input_border_color_hover or getattr( + self, "input_border_color_hover", "*input_border_color" + ) + self.input_border_color_hover_dark = input_border_color_hover_dark or getattr( + self, "input_border_color_hover_dark", "*input_border_color" + ) + self.input_border_width = input_border_width or getattr( + self, "input_border_width", "0px" + ) + self.input_border_width_dark = input_border_width_dark or getattr( + self, "input_border_width_dark", None + ) + self.input_padding = input_padding or getattr( + self, "input_padding", "*spacing_xl" + ) + self.input_placeholder_color = input_placeholder_color or getattr( + self, "input_placeholder_color", "*neutral_400" + ) + self.input_placeholder_color_dark = input_placeholder_color_dark or getattr( + self, "input_placeholder_color_dark", "*neutral_500" + ) + self.input_radius = input_radius or getattr(self, "input_radius", "*radius_lg") + self.input_shadow = input_shadow or getattr(self, 
"input_shadow", "none") + self.input_shadow_dark = input_shadow_dark or getattr( + self, "input_shadow_dark", None + ) + self.input_shadow_focus = input_shadow_focus or getattr( + self, "input_shadow_focus", "*input_shadow" + ) + self.input_shadow_focus_dark = input_shadow_focus_dark or getattr( + self, "input_shadow_focus_dark", None + ) + self.input_text_size = input_text_size or getattr( + self, "input_text_size", "*text_md" + ) + self.input_text_weight = input_text_weight or getattr( + self, "input_text_weight", "400" + ) + self.loader_color = loader_color or getattr( + self, "loader_color", "*color_accent" + ) + self.loader_color_dark = loader_color_dark or getattr( + self, "loader_color_dark", None + ) + self.prose_text_size = prose_text_size or getattr( + self, "prose_text_size", "*text_md" + ) + self.prose_text_weight = prose_text_weight or getattr( + self, "prose_text_weight", "400" + ) + self.prose_header_text_weight = prose_header_text_weight or getattr( + self, "prose_header_text_weight", "600" + ) + self.slider_color = slider_color or getattr( + self, "slider_color", colors.blue.c600 + ) + self.slider_color_dark = slider_color_dark or getattr( + self, "slider_color_dark", None + ) + self.stat_background_fill = stat_background_fill or getattr( + self, "stat_background_fill", "*primary_300" + ) + self.stat_background_fill_dark = stat_background_fill_dark or getattr( + self, "stat_background_fill_dark", "*primary_500" + ) + self.table_border_color = table_border_color or getattr( + self, "table_border_color", "*neutral_300" + ) + self.table_border_color_dark = table_border_color_dark or getattr( + self, "table_border_color_dark", "*neutral_700" + ) + self.table_even_background_fill = table_even_background_fill or getattr( + self, "table_even_background_fill", "white" + ) + self.table_even_background_fill_dark = ( + table_even_background_fill_dark + or getattr(self, "table_even_background_fill_dark", "*neutral_950") + ) + self.table_odd_background_fill = 
table_odd_background_fill or getattr( + self, "table_odd_background_fill", "*neutral_50" + ) + self.table_odd_background_fill_dark = table_odd_background_fill_dark or getattr( + self, "table_odd_background_fill_dark", "*neutral_900" + ) + self.table_radius = table_radius or getattr(self, "table_radius", "*radius_lg") + self.table_row_focus = table_row_focus or getattr( + self, "table_row_focus", "*color_accent_soft" + ) + self.table_row_focus_dark = table_row_focus_dark or getattr( + self, "table_row_focus_dark", "*color_accent_soft" + ) + # Buttons + self.button_border_width = button_border_width or getattr( + self, "button_border_width", "*input_border_width" + ) + self.button_border_width_dark = button_border_width_dark or getattr( + self, "button_border_width_dark", "*input_border_width" + ) + self.button_cancel_background_fill = button_cancel_background_fill or getattr( + self, "button_cancel_background_fill", "*button_secondary_background_fill" + ) + self.button_cancel_background_fill_dark = ( + button_cancel_background_fill_dark + or getattr( + self, + "button_cancel_background_fill_dark", + "*button_secondary_background_fill", + ) + ) + self.button_cancel_background_fill_hover = ( + button_cancel_background_fill_hover + or getattr( + self, + "button_cancel_background_fill_hover", + "*button_cancel_background_fill", + ) + ) + self.button_cancel_background_fill_hover_dark = ( + button_cancel_background_fill_hover_dark + or getattr( + self, + "button_cancel_background_fill_hover_dark", + "*button_cancel_background_fill", + ) + ) + self.button_cancel_border_color = button_cancel_border_color or getattr( + self, "button_cancel_border_color", "*button_secondary_border_color" + ) + self.button_cancel_border_color_dark = ( + button_cancel_border_color_dark + or getattr( + self, + "button_cancel_border_color_dark", + "*button_secondary_border_color", + ) + ) + self.button_cancel_border_color_hover = ( + button_cancel_border_color_hover + or getattr( + self, + 
"button_cancel_border_color_hover", + "*button_cancel_border_color", + ) + ) + self.button_cancel_border_color_hover_dark = ( + button_cancel_border_color_hover_dark + or getattr( + self, + "button_cancel_border_color_hover_dark", + "*button_cancel_border_color", + ) + ) + self.button_cancel_text_color = button_cancel_text_color or getattr( + self, "button_cancel_text_color", "*button_secondary_text_color" + ) + self.button_cancel_text_color_dark = button_cancel_text_color_dark or getattr( + self, "button_cancel_text_color_dark", "*button_secondary_text_color" + ) + self.button_cancel_text_color_hover = button_cancel_text_color_hover or getattr( + self, "button_cancel_text_color_hover", "*button_cancel_text_color" + ) + self.button_cancel_text_color_hover_dark = ( + button_cancel_text_color_hover_dark + or getattr( + self, "button_cancel_text_color_hover_dark", "*button_cancel_text_color" + ) + ) + self.button_large_padding = button_large_padding or getattr( + self, "button_large_padding", "*spacing_lg calc(2 * *spacing_lg)" + ) + self.button_large_radius = button_large_radius or getattr( + self, "button_large_radius", "*radius_lg" + ) + self.button_large_text_size = button_large_text_size or getattr( + self, "button_large_text_size", "*text_lg" + ) + self.button_large_text_weight = button_large_text_weight or getattr( + self, "button_large_text_weight", "600" + ) + self.button_primary_background_fill = button_primary_background_fill or getattr( + self, "button_primary_background_fill", "*primary_200" + ) + self.button_primary_background_fill_dark = ( + button_primary_background_fill_dark + or getattr(self, "button_primary_background_fill_dark", "*primary_700") + ) + self.button_primary_background_fill_hover = ( + button_primary_background_fill_hover + or getattr( + self, + "button_primary_background_fill_hover", + "*button_primary_background_fill", + ) + ) + self.button_primary_background_fill_hover_dark = ( + button_primary_background_fill_hover_dark + or 
getattr( + self, + "button_primary_background_fill_hover_dark", + "*button_primary_background_fill", + ) + ) + self.button_primary_border_color = button_primary_border_color or getattr( + self, "button_primary_border_color", "*primary_200" + ) + self.button_primary_border_color_dark = ( + button_primary_border_color_dark + or getattr(self, "button_primary_border_color_dark", "*primary_600") + ) + self.button_primary_border_color_hover = ( + button_primary_border_color_hover + or getattr( + self, + "button_primary_border_color_hover", + "*button_primary_border_color", + ) + ) + self.button_primary_border_color_hover_dark = ( + button_primary_border_color_hover_dark + or getattr( + self, + "button_primary_border_color_hover_dark", + "*button_primary_border_color", + ) + ) + self.button_primary_text_color = button_primary_text_color or getattr( + self, "button_primary_text_color", "*primary_600" + ) + self.button_primary_text_color_dark = button_primary_text_color_dark or getattr( + self, "button_primary_text_color_dark", "white" + ) + self.button_primary_text_color_hover = ( + button_primary_text_color_hover + or getattr( + self, "button_primary_text_color_hover", "*button_primary_text_color" + ) + ) + self.button_primary_text_color_hover_dark = ( + button_primary_text_color_hover_dark + or getattr( + self, + "button_primary_text_color_hover_dark", + "*button_primary_text_color", + ) + ) + self.button_secondary_background_fill = ( + button_secondary_background_fill + or getattr(self, "button_secondary_background_fill", "*neutral_200") + ) + self.button_secondary_background_fill_dark = ( + button_secondary_background_fill_dark + or getattr(self, "button_secondary_background_fill_dark", "*neutral_600") + ) + self.button_secondary_background_fill_hover = ( + button_secondary_background_fill_hover + or getattr( + self, + "button_secondary_background_fill_hover", + "*button_secondary_background_fill", + ) + ) + self.button_secondary_background_fill_hover_dark = ( + 
button_secondary_background_fill_hover_dark + or getattr( + self, + "button_secondary_background_fill_hover_dark", + "*button_secondary_background_fill", + ) + ) + self.button_secondary_border_color = button_secondary_border_color or getattr( + self, "button_secondary_border_color", "*neutral_200" + ) + self.button_secondary_border_color_dark = ( + button_secondary_border_color_dark + or getattr(self, "button_secondary_border_color_dark", "*neutral_600") + ) + self.button_secondary_border_color_hover = ( + button_secondary_border_color_hover + or getattr( + self, + "button_secondary_border_color_hover", + "*button_secondary_border_color", + ) + ) + self.button_secondary_border_color_hover_dark = ( + button_secondary_border_color_hover_dark + or getattr( + self, + "button_secondary_border_color_hover_dark", + "*button_secondary_border_color", + ) + ) + self.button_secondary_text_color = button_secondary_text_color or getattr( + self, "button_secondary_text_color", "*neutral_700" + ) + self.button_secondary_text_color_dark = ( + button_secondary_text_color_dark + or getattr(self, "button_secondary_text_color_dark", "white") + ) + self.button_secondary_text_color_hover = ( + button_secondary_text_color_hover + or getattr( + self, + "button_secondary_text_color_hover", + "*button_secondary_text_color", + ) + ) + self.button_secondary_text_color_hover_dark = ( + button_secondary_text_color_hover_dark + or getattr( + self, + "button_secondary_text_color_hover_dark", + "*button_secondary_text_color", + ) + ) + self.button_shadow = button_shadow or getattr(self, "button_shadow", "none") + self.button_shadow_active = button_shadow_active or getattr( + self, "button_shadow_active", "none" + ) + self.button_shadow_hover = button_shadow_hover or getattr( + self, "button_shadow_hover", "none" + ) + self.button_small_padding = button_small_padding or getattr( + self, "button_small_padding", "*spacing_sm calc(2 * *spacing_sm)" + ) + self.button_small_radius = button_small_radius 
or getattr( + self, "button_small_radius", "*radius_lg" + ) + self.button_small_text_size = button_small_text_size or getattr( + self, "button_small_text_size", "*text_md" + ) + self.button_small_text_weight = button_small_text_weight or getattr( + self, "button_small_text_weight", "400" + ) + self.button_transition = button_transition or getattr( + self, "button_transition", "background-color 0.2s ease" + ) + return self diff --git a/testbed/gradio-app__gradio/gradio/themes/builder_app.py b/testbed/gradio-app__gradio/gradio/themes/builder_app.py new file mode 100644 index 0000000000000000000000000000000000000000..eecee525366cd1c90f745f886da7a77baa341ae4 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/builder_app.py @@ -0,0 +1,1003 @@ +import inspect +import time +from typing import Iterable + +from gradio_client.documentation import document_fn + +import gradio as gr + +themes = [ + gr.themes.Base, + gr.themes.Default, + gr.themes.Soft, + gr.themes.Monochrome, + gr.themes.Glass, +] +colors = gr.themes.Color.all +sizes = gr.themes.Size.all + +palette_range = [50, 100, 200, 300, 400, 500, 600, 700, 800, 900, 950] +size_range = ["xxs", "xs", "sm", "md", "lg", "xl", "xxl"] +docs_theme_core = document_fn(gr.themes.Base.__init__, gr.themes.Base)[1] +docs_theme_vars = document_fn(gr.themes.Base.set, gr.themes.Base)[1] + + +def get_docstr(var): + for parameters in docs_theme_core + docs_theme_vars: + if parameters["name"] == var: + return parameters["doc"] + raise ValueError(f"Variable {var} not found in theme documentation.") + + +def get_doc_theme_var_groups(): + source = inspect.getsource(gr.themes.Base.set) + groups = [] + group, desc, variables, flat_variables = None, None, [], [] + for line in source.splitlines(): + line = line.strip() + if line.startswith(")"): + break + elif line.startswith("# "): + if group is not None: + groups.append((group, desc, variables)) + group, desc = line[2:].split(": ") + variables = [] + elif "=" in line: + var = 
line.split("=")[0] + variables.append(var) + flat_variables.append(var) + groups.append((group, desc, variables)) + return groups, flat_variables + + +variable_groups, flat_variables = get_doc_theme_var_groups() + +css = """ +.gradio-container { + overflow: visible !important; + max-width: none !important; +} +#controls { + max-height: 100vh; + flex-wrap: unset; + overflow-y: scroll; + position: sticky; + top: 0; +} +#controls::-webkit-scrollbar { + -webkit-appearance: none; + width: 7px; +} + +#controls::-webkit-scrollbar-thumb { + border-radius: 4px; + background-color: rgba(0, 0, 0, .5); + box-shadow: 0 0 1px rgba(255, 255, 255, .5); +} +""" + +with gr.Blocks( # noqa: SIM117 + theme=gr.themes.Base(), + css=css, + title="Gradio Theme Builder", +) as demo: + with gr.Row(): + with gr.Column(scale=1, elem_id="controls", min_width=400): + with gr.Row(): + undo_btn = gr.Button("Undo", size="sm") + dark_mode_btn = gr.Button("Dark Mode", variant="primary", size="sm") + with gr.Tabs(): + with gr.TabItem("Source Theme"): + gr.Markdown( + """ + Select a base theme below you would like to build off of. Note: when you click 'Load Theme', all variable values in other tabs will be overwritten! + """ + ) + base_theme_dropdown = gr.Dropdown( + [theme.__name__ for theme in themes], + value="Base", + show_label=False, + ) + load_theme_btn = gr.Button("Load Theme", elem_id="load_theme") + with gr.TabItem("Core Colors"): + gr.Markdown( + """Set the three hues of the theme: `primary_hue`, `secondary_hue`, and `neutral_hue`. + Each of these is a palette ranging from 50 to 950 in brightness. Pick a preset palette - optionally, open the accordion to overwrite specific values. 
+ Note that these variables do not affect elements directly, but are referenced by other variables with asterisks, such as `*primary_200` or `*neutral_950`.""" + ) + primary_hue = gr.Dropdown( + [color.name for color in colors], label="Primary Hue" + ) + with gr.Accordion(label="Primary Hue Palette", open=False): + primary_hues = [] + for i in palette_range: + primary_hues.append( + gr.ColorPicker( + label=f"primary_{i}", + ) + ) + + secondary_hue = gr.Dropdown( + [color.name for color in colors], label="Secondary Hue" + ) + with gr.Accordion(label="Secondary Hue Palette", open=False): + secondary_hues = [] + for i in palette_range: + secondary_hues.append( + gr.ColorPicker( + label=f"secondary_{i}", + ) + ) + + neutral_hue = gr.Dropdown( + [color.name for color in colors], label="Neutral hue" + ) + with gr.Accordion(label="Neutral Hue Palette", open=False): + neutral_hues = [] + for i in palette_range: + neutral_hues.append( + gr.ColorPicker( + label=f"neutral_{i}", + ) + ) + + with gr.TabItem("Core Sizing"): + gr.Markdown( + """Set the sizing of the theme via: `text_size`, `spacing_size`, and `radius_size`. + Each of these is set to a collection of sizes ranging from `xxs` to `xxl`. Pick a preset size collection - optionally, open the accordion to overwrite specific values. + Note that these variables do not affect elements directly, but are referenced by other variables with asterisks, such as `*spacing_xl` or `*text_sm`. 
+ """ + ) + text_size = gr.Dropdown( + [size.name for size in sizes if size.name.startswith("text_")], + label="Text Size", + ) + with gr.Accordion(label="Text Size Range", open=False): + text_sizes = [] + for i in size_range: + text_sizes.append( + gr.Textbox( + label=f"text_{i}", + ) + ) + + spacing_size = gr.Dropdown( + [ + size.name + for size in sizes + if size.name.startswith("spacing_") + ], + label="Spacing Size", + ) + with gr.Accordion(label="Spacing Size Range", open=False): + spacing_sizes = [] + for i in size_range: + spacing_sizes.append( + gr.Textbox( + label=f"spacing_{i}", + ) + ) + + radius_size = gr.Dropdown( + [ + size.name + for size in sizes + if size.name.startswith("radius_") + ], + label="Radius Size", + ) + with gr.Accordion(label="Radius Size Range", open=False): + radius_sizes = [] + for i in size_range: + radius_sizes.append( + gr.Textbox( + label=f"radius_{i}", + ) + ) + + with gr.TabItem("Core Fonts"): + gr.Markdown( + """Set the main `font` and the monospace `font_mono` here. + Set up to 4 values for each (fallbacks in case a font is not available). + Check "Google Font" if font should be loaded from Google Fonts. 
+ """ + ) + gr.Markdown("### Main Font") + main_fonts, main_is_google = [], [] + for i in range(4): + with gr.Row(): + font = gr.Textbox(label=f"Font {i + 1}") + font_is_google = gr.Checkbox(label="Google Font") + main_fonts.append(font) + main_is_google.append(font_is_google) + + mono_fonts, mono_is_google = [], [] + gr.Markdown("### Monospace Font") + for i in range(4): + with gr.Row(): + font = gr.Textbox(label=f"Font {i + 1}") + font_is_google = gr.Checkbox(label="Google Font") + mono_fonts.append(font) + mono_is_google.append(font_is_google) + + theme_var_input = [] + + core_color_suggestions = ( + [f"*primary_{i}" for i in palette_range] + + [f"*secondary_{i}" for i in palette_range] + + [f"*neutral_{i}" for i in palette_range] + ) + + variable_suggestions = { + "fill": core_color_suggestions[:], + "color": core_color_suggestions[:], + "text_size": [f"*text_{i}" for i in size_range], + "radius": [f"*radius_{i}" for i in size_range], + "padding": [f"*spacing_{i}" for i in size_range], + "gap": [f"*spacing_{i}" for i in size_range], + "weight": [ + "100", + "200", + "300", + "400", + "500", + "600", + "700", + "800", + ], + "shadow": ["none"], + "border_width": [], + } + for variable in flat_variables: + if variable.endswith("_dark"): + continue + for style_type in variable_suggestions: + if style_type in variable: + variable_suggestions[style_type].append("*" + variable) + break + + variable_suggestions["fill"], variable_suggestions["color"] = ( + variable_suggestions["fill"] + + variable_suggestions["color"][len(core_color_suggestions) :], + variable_suggestions["color"] + + variable_suggestions["fill"][len(core_color_suggestions) :], + ) + + for group, desc, variables in variable_groups: + with gr.TabItem(group): + gr.Markdown( + desc + + "\nYou can set these to one of the dropdown values, or clear the dropdown to set a custom value." 
+ ) + for variable in variables: + suggestions = [] + for style_type in variable_suggestions: + if style_type in variable: + suggestions = variable_suggestions[style_type][:] + if "*" + variable in suggestions: + suggestions.remove("*" + variable) + break + dropdown = gr.Dropdown( + label=variable, + info=get_docstr(variable), + choices=suggestions, + allow_custom_value=True, + ) + theme_var_input.append(dropdown) + + # App + + with gr.Column(scale=6, elem_id="app"): + with gr.Column(variant="panel"): + gr.Markdown( + """ + # Theme Builder + Welcome to the theme builder. The left panel is where you create the theme. The different aspects of the theme are broken down into different tabs. Here's how to navigate them: + 1. First, set the "Source Theme". This will set the default values that you can override. + 2. Set the "Core Colors", "Core Sizing" and "Core Fonts". These are the core variables that are used to build the rest of the theme. + 3. The rest of the tabs set specific CSS theme variables. These control finer aspects of the UI. Within these theme variables, you can reference the core variables and other theme variables using the variable name preceded by an asterisk, e.g. `*primary_50` or `*body_text_color`. Clear the dropdown to set a custom value. + 4. Once you have finished your theme, click on "View Code" below to see how you can integrate the theme into your app. You can also click on "Upload to Hub" to upload your theme to the Hugging Face Hub, where others can download and use your theme. + """ + ) + with gr.Accordion("View Code", open=False): + output_code = gr.Code(language="python") + with gr.Accordion("Upload to Hub", open=False): + gr.Markdown( + "You can save your theme on the Hugging Face Hub. HF API write token can be found [here](https://huggingface.co/settings/tokens)." 
+ ) + with gr.Row(): + theme_name = gr.Textbox(label="Theme Name") + theme_hf_token = gr.Textbox(label="Hugging Face Write Token") + theme_version = gr.Textbox( + label="Version", + placeholder="Leave blank to automatically update version.", + ) + upload_to_hub_btn = gr.Button("Upload to Hub") + theme_upload_status = gr.Markdown(visible=False) + + gr.Markdown("Below this panel is a dummy app to demo your theme.") + + name = gr.Textbox( + label="Name", + info="Full name, including middle name. No special characters.", + placeholder="John Doe", + value="John Doe", + interactive=True, + ) + + with gr.Row(): + slider1 = gr.Slider(label="Slider 1") + slider2 = gr.Slider(label="Slider 2") + gr.CheckboxGroup(["A", "B", "C"], label="Checkbox Group") + + with gr.Row(): + with gr.Column(variant="panel", scale=1): + gr.Markdown("## Panel 1") + radio = gr.Radio( + ["A", "B", "C"], + label="Radio", + info="Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.", + ) + drop = gr.Dropdown( + ["Option 1", "Option 2", "Option 3"], show_label=False + ) + drop_2 = gr.Dropdown( + ["Option A", "Option B", "Option C"], + multiselect=True, + value=["Option A"], + label="Dropdown", + interactive=True, + ) + check = gr.Checkbox(label="Go") + with gr.Column(variant="panel", scale=2): + img = gr.Image( + "https://gradio-static-files.s3.us-west-2.amazonaws.com/header-image.jpg", + label="Image", + height=320, + ) + with gr.Row(): + go_btn = gr.Button( + "Go", label="Primary Button", variant="primary" + ) + clear_btn = gr.Button( + "Clear", label="Secondary Button", variant="secondary" + ) + + def go(*args): + time.sleep(3) + return "https://gradio-static-files.s3.us-west-2.amazonaws.com/header-image.jpg" + + go_btn.click( + go, + [radio, drop, drop_2, check, name], + img, + api_name=False, + ) + + def clear(): + time.sleep(0.2) + return None + + clear_btn.click(clear, None, img) + + with gr.Row(): + btn1 = gr.Button("Button 1", size="sm") + btn2 = gr.UploadButton(size="sm") + stop_btn = gr.Button( + "Stop", label="Stop Button", variant="stop", size="sm" + ) + + gr.Examples( + examples=[ + [ + "A", + "Option 1", + ["Option B"], + True, + ], + [ + "B", + "Option 2", + ["Option B", "Option C"], + False, + ], + ], + inputs=[radio, drop, drop_2, check], + label="Examples", + ) + + with gr.Row(): + gr.Dataframe(value=[[1, 2, 3], [4, 5, 6], [7, 8, 9]], label="Dataframe") + gr.JSON( + value={"a": 1, "b": 2, "c": {"test": "a", "test2": [1, 2, 3]}}, + label="JSON", + ) + gr.Label(value={"cat": 0.7, "dog": 0.2, "fish": 0.1}) + gr.File() + with gr.Row(): + gr.ColorPicker() + gr.Video( + "https://gradio-static-files.s3.us-west-2.amazonaws.com/world.mp4" + ) + gr.Gallery( + [ + ( + "https://gradio-static-files.s3.us-west-2.amazonaws.com/lion.jpg", + "lion", + ), + ( + "https://gradio-static-files.s3.us-west-2.amazonaws.com/logo.png", + "logo", 
+ ), + ( + "https://gradio-static-files.s3.us-west-2.amazonaws.com/tower.jpg", + "tower", + ), + ], + height="200px", + columns=2, + ) + + with gr.Row(): + with gr.Column(scale=2): + chatbot = gr.Chatbot([("Hello", "Hi")], label="Chatbot") + chat_btn = gr.Button("Add messages") + + def chat(history): + time.sleep(2) + yield [["How are you?", "I am good."]] + + chat_btn.click( + lambda history: history + + [["How are you?", "I am good."]] + + (time.sleep(2) or []), + chatbot, + chatbot, + api_name=False, + ) + with gr.Column(scale=1): + with gr.Accordion("Advanced Settings"): + gr.Markdown("Hello") + gr.Number(label="Chatbot control 1") + gr.Number(label="Chatbot control 2") + gr.Number(label="Chatbot control 3") + + # Event Listeners + + secret_css = gr.Textbox(visible=False) + secret_font = gr.JSON(visible=False) + + demo.load( # doing this via python was not working for some reason, so using this hacky method for now + None, + None, + None, + _js="""() => { + document.head.innerHTML += ""; + let evt_listener = window.setTimeout( + () => { + load_theme_btn = document.querySelector('#load_theme'); + if (load_theme_btn) { + load_theme_btn.click(); + window.clearTimeout(evt_listener); + } + }, + 100 + ); + }""", + api_name=False, + ) + + theme_inputs = ( + [primary_hue, secondary_hue, neutral_hue] + + primary_hues + + secondary_hues + + neutral_hues + + [text_size, spacing_size, radius_size] + + text_sizes + + spacing_sizes + + radius_sizes + + main_fonts + + main_is_google + + mono_fonts + + mono_is_google + + theme_var_input + ) + + def load_theme(theme_name): + theme = [theme for theme in themes if theme.__name__ == theme_name][0] + + parameters = inspect.signature(theme.__init__).parameters + primary_hue = parameters["primary_hue"].default + secondary_hue = parameters["secondary_hue"].default + neutral_hue = parameters["neutral_hue"].default + text_size = parameters["text_size"].default + spacing_size = parameters["spacing_size"].default + radius_size = 
parameters["radius_size"].default + + theme = theme() + + font = theme._font[:4] + font_mono = theme._font_mono[:4] + font_is_google = [isinstance(f, gr.themes.GoogleFont) for f in font] + font_mono_is_google = [ + isinstance(f, gr.themes.GoogleFont) for f in font_mono + ] + + def pad_to_4(x): + return x + [None] * (4 - len(x)) + + var_output = [] + for variable in flat_variables: + theme_val = getattr(theme, variable) + if theme_val is None and variable.endswith("_dark"): + theme_val = getattr(theme, variable[:-5]) + var_output.append(theme_val) + + return ( + [primary_hue.name, secondary_hue.name, neutral_hue.name] + + primary_hue.expand() + + secondary_hue.expand() + + neutral_hue.expand() + + [text_size.name, spacing_size.name, radius_size.name] + + text_size.expand() + + spacing_size.expand() + + radius_size.expand() + + pad_to_4([f.name for f in font]) + + pad_to_4(font_is_google) + + pad_to_4([f.name for f in font_mono]) + + pad_to_4(font_mono_is_google) + + var_output + ) + + def generate_theme_code( + base_theme, final_theme, core_variables, final_main_fonts, final_mono_fonts + ): + base_theme_name = base_theme + base_theme = [theme for theme in themes if theme.__name__ == base_theme][ + 0 + ]() + + parameters = inspect.signature(base_theme.__init__).parameters + primary_hue = parameters["primary_hue"].default + secondary_hue = parameters["secondary_hue"].default + neutral_hue = parameters["neutral_hue"].default + text_size = parameters["text_size"].default + spacing_size = parameters["spacing_size"].default + radius_size = parameters["radius_size"].default + font = parameters["font"].default + font = [font] if not isinstance(font, Iterable) else font + font = [ + gr.themes.Font(f) if not isinstance(f, gr.themes.Font) else f + for f in font + ] + font_mono = parameters["font_mono"].default + font_mono = ( + [font_mono] if not isinstance(font_mono, Iterable) else font_mono + ) + font_mono = [ + gr.themes.Font(f) if not isinstance(f, gr.themes.Font) else f + 
for f in font_mono + ] + + core_diffs = {} + specific_core_diffs = {} + core_var_names = [ + "primary_hue", + "secondary_hue", + "neutral_hue", + "text_size", + "spacing_size", + "radius_size", + ] + for value_name, base_value, source_class, final_value in zip( + core_var_names, + [ + primary_hue, + secondary_hue, + neutral_hue, + text_size, + spacing_size, + radius_size, + ], + [ + gr.themes.Color, + gr.themes.Color, + gr.themes.Color, + gr.themes.Size, + gr.themes.Size, + gr.themes.Size, + ], + core_variables, + ): + if base_value.name != final_value: + core_diffs[value_name] = final_value + source_obj = [ + obj for obj in source_class.all if obj.name == final_value + ][0] + final_attr_values = {} + diff = False + for attr in dir(source_obj): + if attr in ["all", "name", "expand"] or attr.startswith("_"): + continue + final_theme_attr = ( + value_name.split("_")[0] + + "_" + + (attr[1:] if source_class == gr.themes.Color else attr) + ) + final_attr_values[final_theme_attr] = getattr( + final_theme, final_theme_attr + ) + if getattr(source_obj, attr) != final_attr_values[final_theme_attr]: + diff = True + if diff: + specific_core_diffs[value_name] = (source_class, final_attr_values) + + font_diffs = {} + + final_main_fonts = [font for font in final_main_fonts if font[0]] + final_mono_fonts = [font for font in final_mono_fonts if font[0]] + font = font[:4] + font_mono = font_mono[:4] + for base_font_set, theme_font_set, font_set_name in [ + (font, final_main_fonts, "font"), + (font_mono, final_mono_fonts, "font_mono"), + ]: + if len(base_font_set) != len(theme_font_set) or any( + base_font.name != theme_font[0] + or isinstance(base_font, gr.themes.GoogleFont) != theme_font[1] + for base_font, theme_font in zip(base_font_set, theme_font_set) + ): + font_diffs[font_set_name] = [ + f"gr.themes.GoogleFont('{font_name}')" + if is_google_font + else f"'{font_name}'" + for font_name, is_google_font in theme_font_set + ] + + newline = "\n" + + core_diffs_code = "" + if 
len(core_diffs) + len(specific_core_diffs) > 0: + for var_name in core_var_names: + if var_name in specific_core_diffs: + cls, vals = specific_core_diffs[var_name] + core_diffs_code += f""" {var_name}=gr.themes.{cls.__name__}({', '.join(f'''{k}="{v}"''' for k, v in vals.items())}),\n""" + elif var_name in core_diffs: + core_diffs_code += ( + f""" {var_name}="{core_diffs[var_name]}",\n""" + ) + + font_diffs_code = "" + + if len(font_diffs) > 0: + font_diffs_code = "".join( + [ + f""" {font_set_name}=[{", ".join(fonts)}],\n""" + for font_set_name, fonts in font_diffs.items() + ] + ) + var_diffs = {} + for variable in flat_variables: + base_theme_val = getattr(base_theme, variable) + final_theme_val = getattr(final_theme, variable) + if base_theme_val is None and variable.endswith("_dark"): + base_theme_val = getattr(base_theme, variable[:-5]) + if base_theme_val != final_theme_val: + var_diffs[variable] = getattr(final_theme, variable) + + newline = "\n" + + vars_diff_code = "" + if len(var_diffs) > 0: + vars_diff_code = f""".set( + {(',' + newline + " ").join([f"{k}='{v}'" for k, v in var_diffs.items()])} +)""" + + output = f""" +import gradio as gr + +theme = gr.themes.{base_theme_name}({newline if core_diffs_code or font_diffs_code else ""}{core_diffs_code}{font_diffs_code}){vars_diff_code} + +with gr.Blocks(theme=theme) as demo: + ...""" + return output + + history = gr.State([]) + current_theme = gr.State(None) + + def render_variables(history, base_theme, *args): + primary_hue, secondary_hue, neutral_hue = args[0:3] + primary_hues = args[3 : 3 + len(palette_range)] + secondary_hues = args[3 + len(palette_range) : 3 + 2 * len(palette_range)] + neutral_hues = args[3 + 2 * len(palette_range) : 3 + 3 * len(palette_range)] + text_size, spacing_size, radius_size = args[ + 3 + 3 * len(palette_range) : 6 + 3 * len(palette_range) + ] + text_sizes = args[ + 6 + + 3 * len(palette_range) : 6 + + 3 * len(palette_range) + + len(size_range) + ] + spacing_sizes = args[ + 6 + + 
3 * len(palette_range) + + len(size_range) : 6 + + 3 * len(palette_range) + + 2 * len(size_range) + ] + radius_sizes = args[ + 6 + + 3 * len(palette_range) + + 2 * len(size_range) : 6 + + 3 * len(palette_range) + + 3 * len(size_range) + ] + main_fonts = args[ + 6 + + 3 * len(palette_range) + + 3 * len(size_range) : 6 + + 3 * len(palette_range) + + 3 * len(size_range) + + 4 + ] + main_is_google = args[ + 6 + + 3 * len(palette_range) + + 3 * len(size_range) + + 4 : 6 + + 3 * len(palette_range) + + 3 * len(size_range) + + 8 + ] + mono_fonts = args[ + 6 + + 3 * len(palette_range) + + 3 * len(size_range) + + 8 : 6 + + 3 * len(palette_range) + + 3 * len(size_range) + + 12 + ] + mono_is_google = args[ + 6 + + 3 * len(palette_range) + + 3 * len(size_range) + + 12 : 6 + + 3 * len(palette_range) + + 3 * len(size_range) + + 16 + ] + remaining_args = args[ + 6 + 3 * len(palette_range) + 3 * len(size_range) + 16 : + ] + + final_primary_color = gr.themes.Color(*primary_hues) + final_secondary_color = gr.themes.Color(*secondary_hues) + final_neutral_color = gr.themes.Color(*neutral_hues) + final_text_size = gr.themes.Size(*text_sizes) + final_spacing_size = gr.themes.Size(*spacing_sizes) + final_radius_size = gr.themes.Size(*radius_sizes) + + final_main_fonts = [] + font_weights = set() + for attr, val in zip(flat_variables, remaining_args): + if "weight" in attr: + font_weights.add(val) + font_weights = sorted(font_weights) + + for main_font, is_google in zip(main_fonts, main_is_google): + if not main_font: + continue + if is_google: + main_font = gr.themes.GoogleFont(main_font, weights=font_weights) + final_main_fonts.append(main_font) + final_mono_fonts = [] + for mono_font, is_google in zip(mono_fonts, mono_is_google): + if not mono_font: + continue + if is_google: + mono_font = gr.themes.GoogleFont(mono_font, weights=font_weights) + final_mono_fonts.append(mono_font) + + theme = gr.themes.Base( + primary_hue=final_primary_color, + secondary_hue=final_secondary_color, + 
neutral_hue=final_neutral_color, + text_size=final_text_size, + spacing_size=final_spacing_size, + radius_size=final_radius_size, + font=final_main_fonts, + font_mono=final_mono_fonts, + ) + + theme.set(**dict(zip(flat_variables, remaining_args))) + new_step = (base_theme, args) + if len(history) == 0 or str(history[-1]) != str(new_step): + history.append(new_step) + + return ( + history, + theme._get_theme_css(), + theme._stylesheets, + generate_theme_code( + base_theme, + theme, + ( + primary_hue, + secondary_hue, + neutral_hue, + text_size, + spacing_size, + radius_size, + ), + list(zip(main_fonts, main_is_google)), + list(zip(mono_fonts, mono_is_google)), + ), + theme, + ) + + def attach_rerender(evt_listener): + return evt_listener( + render_variables, + [history, base_theme_dropdown] + theme_inputs, + [history, secret_css, secret_font, output_code, current_theme], + api_name=False, + ).then( + None, + [secret_css, secret_font], + None, + _js="""(css, fonts) => { + document.getElementById('theme_css').innerHTML = css; + let existing_font_links = document.querySelectorAll('link[rel="stylesheet"][href^="https://fonts.googleapis.com/css"]'); + existing_font_links.forEach(link => { + if (fonts.includes(link.href)) { + fonts = fonts.filter(font => font != link.href); + } else { + link.remove(); + } + }); + fonts.forEach(font => { + let link = document.createElement('link'); + link.rel = 'stylesheet'; + link.href = font; + document.head.appendChild(link); + }); + }""", + api_name=False, + ) + + def load_color(color_name): + color = [color for color in colors if color.name == color_name][0] + return [getattr(color, f"c{i}") for i in palette_range] + + attach_rerender( + primary_hue.select( + load_color, primary_hue, primary_hues, api_name=False + ).then + ) + attach_rerender( + secondary_hue.select( + load_color, secondary_hue, secondary_hues, api_name=False + ).then + ) + attach_rerender( + neutral_hue.select( + load_color, neutral_hue, neutral_hues, api_name=False 
+ ).then + ) + for hue_set in (primary_hues, secondary_hues, neutral_hues): + for hue in hue_set: + attach_rerender(hue.blur) + + def load_size(size_name): + size = [size for size in sizes if size.name == size_name][0] + return [getattr(size, i) for i in size_range] + + attach_rerender( + text_size.change(load_size, text_size, text_sizes, api_name=False).then + ) + attach_rerender( + spacing_size.change( + load_size, spacing_size, spacing_sizes, api_name=False + ).then + ) + attach_rerender( + radius_size.change( + load_size, radius_size, radius_sizes, api_name=False + ).then + ) + + attach_rerender( + load_theme_btn.click( + load_theme, base_theme_dropdown, theme_inputs, api_name=False + ).then + ) + + for theme_box in ( + text_sizes + spacing_sizes + radius_sizes + main_fonts + mono_fonts + ): + attach_rerender(theme_box.blur) + attach_rerender(theme_box.submit) + for theme_box in theme_var_input: + attach_rerender(theme_box.blur) + attach_rerender(theme_box.select) + for checkbox in main_is_google + mono_is_google: + attach_rerender(checkbox.select) + + dark_mode_btn.click( + None, + None, + None, + _js="""() => { + if (document.querySelectorAll('.dark').length) { + document.querySelectorAll('.dark').forEach(el => el.classList.remove('dark')); + } else { + document.querySelector('body').classList.add('dark'); + } + }""", + api_name=False, + ) + + def undo(history_var): + if len(history_var) <= 1: + return {history: gr.skip()} + else: + history_var.pop() + old = history_var.pop() + return [history_var, old[0]] + list(old[1]) + + attach_rerender( + undo_btn.click( + undo, + [history], + [history, base_theme_dropdown] + theme_inputs, + api_name=False, + ).then + ) + + def upload_to_hub(data): + try: + theme_url = data[current_theme].push_to_hub( + repo_name=data[theme_name], + version=data[theme_version] or None, + hf_token=data[theme_hf_token], + theme_name=data[theme_name], + ) + space_name = "/".join(theme_url.split("/")[-2:]) + return ( + gr.Markdown.update( + 
value=f"Theme uploaded [here!]({theme_url})! Load it as `gr.Blocks(theme='{space_name}')`", + visible=True, + ), + "Upload to Hub", + ) + except Exception as e: + return ( + gr.Markdown.update( + value=f"Error: {e}", + visible=True, + ), + "Upload to Hub", + ) + + upload_to_hub_btn.click( + lambda: "Uploading...", + None, + upload_to_hub_btn, + api_name=False, + ).then( + upload_to_hub, + { + current_theme, + theme_name, + theme_hf_token, + theme_version, + }, + [theme_upload_status, upload_to_hub_btn], + api_name=False, + ) + + +if __name__ == "__main__": + demo.launch() diff --git a/testbed/gradio-app__gradio/gradio/themes/default.py b/testbed/gradio-app__gradio/gradio/themes/default.py new file mode 100644 index 0000000000000000000000000000000000000000..5861d02fdceec7b007f462697478d2dbf370d76f --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/default.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +from typing import Iterable + +from gradio.themes.base import Base +from gradio.themes.utils import colors, fonts, sizes + + +class Default(Base): + def __init__( + self, + *, + primary_hue: colors.Color | str = colors.orange, + secondary_hue: colors.Color | str = colors.blue, + neutral_hue: colors.Color | str = colors.gray, + spacing_size: sizes.Size | str = sizes.spacing_md, + radius_size: sizes.Size | str = sizes.radius_md, + text_size: sizes.Size | str = sizes.text_md, + font: fonts.Font + | str + | Iterable[fonts.Font | str] = ( + fonts.GoogleFont("Source Sans Pro"), + "ui-sans-serif", + "system-ui", + "sans-serif", + ), + font_mono: fonts.Font + | str + | Iterable[fonts.Font | str] = ( + fonts.GoogleFont("IBM Plex Mono"), + "ui-monospace", + "Consolas", + "monospace", + ), + ): + super().__init__( + primary_hue=primary_hue, + secondary_hue=secondary_hue, + neutral_hue=neutral_hue, + spacing_size=spacing_size, + radius_size=radius_size, + text_size=text_size, + font=font, + font_mono=font_mono, + ) + self.name = "default" + super().set( + # 
Colors + input_background_fill_dark="*neutral_800", + error_background_fill=colors.red.c50, + error_background_fill_dark="*neutral_900", + error_border_color=colors.red.c700, + error_border_color_dark=colors.red.c500, + error_icon_color=colors.red.c700, + error_icon_color_dark=colors.red.c500, + # Transition + button_transition="none", + # Shadows + button_shadow="*shadow_drop", + button_shadow_hover="*shadow_drop_lg", + button_shadow_active="*shadow_inset", + input_shadow="0 0 0 *shadow_spread transparent, *shadow_inset", + input_shadow_focus="0 0 0 *shadow_spread *secondary_50, *shadow_inset", + input_shadow_focus_dark="0 0 0 *shadow_spread *neutral_700, *shadow_inset", + checkbox_label_shadow="*shadow_drop", + block_shadow="*shadow_drop", + form_gap_width="1px", + # Button borders + input_border_width="1px", + input_background_fill="white", + # Gradients + stat_background_fill="linear-gradient(to right, *primary_400, *primary_200)", + stat_background_fill_dark="linear-gradient(to right, *primary_400, *primary_600)", + checkbox_label_background_fill="linear-gradient(to top, *neutral_50, white)", + checkbox_label_background_fill_dark="linear-gradient(to top, *neutral_900, *neutral_800)", + checkbox_label_background_fill_hover="linear-gradient(to top, *neutral_100, white)", + checkbox_label_background_fill_hover_dark="linear-gradient(to top, *neutral_900, *neutral_800)", + button_primary_background_fill="linear-gradient(to bottom right, *primary_100, *primary_300)", + button_primary_background_fill_dark="linear-gradient(to bottom right, *primary_500, *primary_600)", + button_primary_background_fill_hover="linear-gradient(to bottom right, *primary_100, *primary_200)", + button_primary_background_fill_hover_dark="linear-gradient(to bottom right, *primary_500, *primary_500)", + button_primary_border_color_dark="*primary_500", + button_secondary_background_fill="linear-gradient(to bottom right, *neutral_100, *neutral_200)", + 
button_secondary_background_fill_dark="linear-gradient(to bottom right, *neutral_600, *neutral_700)", + button_secondary_background_fill_hover="linear-gradient(to bottom right, *neutral_100, *neutral_100)", + button_secondary_background_fill_hover_dark="linear-gradient(to bottom right, *neutral_600, *neutral_600)", + button_cancel_background_fill=f"linear-gradient(to bottom right, {colors.red.c100}, {colors.red.c200})", + button_cancel_background_fill_dark=f"linear-gradient(to bottom right, {colors.red.c600}, {colors.red.c700})", + button_cancel_background_fill_hover=f"linear-gradient(to bottom right, {colors.red.c100}, {colors.red.c100})", + button_cancel_background_fill_hover_dark=f"linear-gradient(to bottom right, {colors.red.c600}, {colors.red.c600})", + button_cancel_border_color=colors.red.c200, + button_cancel_border_color_dark=colors.red.c600, + button_cancel_text_color=colors.red.c600, + button_cancel_text_color_dark="white", + border_color_accent_subdued="*primary_200", + ) diff --git a/testbed/gradio-app__gradio/gradio/themes/glass.py b/testbed/gradio-app__gradio/gradio/themes/glass.py new file mode 100644 index 0000000000000000000000000000000000000000..f3a93e09b7f2d25ff8b2595761274867fd5da47a --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/glass.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +from typing import Iterable + +from gradio.themes.base import Base +from gradio.themes.utils import colors, fonts, sizes + + +class Glass(Base): + def __init__( + self, + *, + primary_hue: colors.Color | str = colors.stone, + secondary_hue: colors.Color | str = colors.stone, + neutral_hue: colors.Color | str = colors.stone, + spacing_size: sizes.Size | str = sizes.spacing_sm, + radius_size: sizes.Size | str = sizes.radius_sm, + text_size: sizes.Size | str = sizes.text_sm, + font: fonts.Font + | str + | Iterable[fonts.Font | str] = ( + "Optima", + "Candara", + "Noto Sans", + "source-sans-pro", + "sans-serif", + ), + font_mono: fonts.Font + | 
str + | Iterable[fonts.Font | str] = ( + fonts.GoogleFont("IBM Plex Mono"), + "ui-monospace", + "Consolas", + "monospace", + ), + ): + super().__init__( + primary_hue=primary_hue, + secondary_hue=secondary_hue, + neutral_hue=neutral_hue, + spacing_size=spacing_size, + radius_size=radius_size, + text_size=text_size, + font=font, + font_mono=font_mono, + ) + self.name = "glass" + super().set( + body_background_fill_dark="*primary_800", + background_fill_secondary_dark="*primary_800", + block_background_fill_dark="*primary_800", + button_primary_background_fill="linear-gradient(180deg, *primary_50 0%, *primary_200 50%, *primary_300 50%, *primary_200 100%)", + button_primary_background_fill_hover="linear-gradient(180deg, *primary_100 0%, *primary_200 50%, *primary_300 50%, *primary_200 100%)", + button_primary_background_fill_dark="linear-gradient(180deg, *primary_400 0%, *primary_500 50%, *primary_600 50%, *primary_500 100%)", + button_primary_background_fill_hover_dark="linear-gradient(180deg, *primary_400 0%, *primary_500 50%, *primary_600 50%, *primary_500 100%)", + button_secondary_background_fill="*button_primary_background_fill", + button_secondary_background_fill_hover="*button_primary_background_fill_hover", + button_secondary_background_fill_dark="*button_primary_background_fill", + button_secondary_background_fill_hover_dark="*button_primary_background_fill_hover", + button_cancel_background_fill="*button_primary_background_fill", + button_cancel_background_fill_hover="*button_primary_background_fill_hover", + button_cancel_background_fill_dark="*button_primary_background_fill", + button_cancel_background_fill_hover_dark="*button_primary_background_fill_hover", + button_cancel_border_color="*button_secondary_border_color", + button_cancel_border_color_dark="*button_secondary_border_color", + button_cancel_text_color="*button_secondary_text_color", + checkbox_border_width="0px", + checkbox_label_background_fill="*button_secondary_background_fill", + 
checkbox_label_background_fill_dark="*button_secondary_background_fill", + checkbox_label_background_fill_hover="*button_secondary_background_fill_hover", + checkbox_label_background_fill_hover_dark="*button_secondary_background_fill_hover", + checkbox_label_border_width="1px", + checkbox_background_color_dark="*primary_600", + button_border_width="1px", + button_shadow_active="*shadow_inset", + input_background_fill="linear-gradient(0deg, *secondary_50 0%, white 100%)", + input_background_fill_dark="*secondary_600", + input_border_color_focus_dark="*primary_400", + input_border_width="1px", + slider_color="*primary_400", + block_label_text_color="*primary_500", + block_title_text_color="*primary_500", + block_label_text_weight="600", + block_title_text_weight="600", + block_label_text_size="*text_md", + block_title_text_size="*text_md", + block_label_background_fill="*primary_200", + block_label_background_fill_dark="*primary_700", + block_border_width="0px", + block_border_width_dark="1px", + panel_border_width="1px", + border_color_primary_dark="*primary_500", + background_fill_primary_dark="*neutral_700", + background_fill_secondary="*primary_100", + block_background_fill="*primary_50", + block_shadow="*primary_400 0px 0px 3px 0px", + table_even_background_fill_dark="*neutral_700", + table_odd_background_fill_dark="*neutral_700", + ) diff --git a/testbed/gradio-app__gradio/gradio/themes/monochrome.py b/testbed/gradio-app__gradio/gradio/themes/monochrome.py new file mode 100644 index 0000000000000000000000000000000000000000..07f8e4f4faf10099f707cacdba54b06b86f72f9b --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/monochrome.py @@ -0,0 +1,93 @@ +from __future__ import annotations + +from typing import Iterable + +from gradio.themes.base import Base +from gradio.themes.utils import colors, fonts, sizes + + +class Monochrome(Base): + def __init__( + self, + *, + primary_hue: colors.Color | str = colors.neutral, + secondary_hue: colors.Color | str = 
colors.neutral, + neutral_hue: colors.Color | str = colors.neutral, + spacing_size: sizes.Size | str = sizes.spacing_lg, + radius_size: sizes.Size | str = sizes.radius_none, + text_size: sizes.Size | str = sizes.text_md, + font: fonts.Font + | str + | Iterable[fonts.Font | str] = ( + fonts.GoogleFont("Quicksand"), + "ui-sans-serif", + "system-ui", + "sans-serif", + ), + font_mono: fonts.Font + | str + | Iterable[fonts.Font | str] = ( + fonts.GoogleFont("IBM Plex Mono"), + "ui-monospace", + "Consolas", + "monospace", + ), + ): + super().__init__( + primary_hue=primary_hue, + secondary_hue=secondary_hue, + neutral_hue=neutral_hue, + spacing_size=spacing_size, + radius_size=radius_size, + text_size=text_size, + font=font, + font_mono=font_mono, + ) + self.name = "monochrome" + super().set( + # Colors + slider_color="*neutral_900", + slider_color_dark="*neutral_500", + body_text_color="*neutral_900", + block_label_text_color="*body_text_color", + block_title_text_color="*body_text_color", + body_text_color_subdued="*neutral_700", + background_fill_primary_dark="*neutral_900", + background_fill_secondary_dark="*neutral_800", + block_background_fill_dark="*neutral_800", + input_background_fill_dark="*neutral_700", + # Button Colors + button_primary_background_fill="*neutral_900", + button_primary_background_fill_hover="*neutral_700", + button_primary_text_color="white", + button_primary_background_fill_dark="*neutral_600", + button_primary_background_fill_hover_dark="*neutral_600", + button_primary_text_color_dark="white", + button_secondary_background_fill="*button_primary_background_fill", + button_secondary_background_fill_hover="*button_primary_background_fill_hover", + button_secondary_text_color="*button_primary_text_color", + button_cancel_background_fill="*button_primary_background_fill", + button_cancel_background_fill_hover="*button_primary_background_fill_hover", + button_cancel_text_color="*button_primary_text_color", + 
checkbox_label_background_fill="*button_primary_background_fill", + checkbox_label_background_fill_hover="*button_primary_background_fill_hover", + checkbox_label_text_color="*button_primary_text_color", + checkbox_background_color_selected="*neutral_600", + checkbox_background_color_dark="*neutral_700", + checkbox_background_color_selected_dark="*neutral_700", + checkbox_border_color_selected_dark="*neutral_800", + # Padding + checkbox_label_padding="*spacing_md", + button_large_padding="*spacing_lg", + button_small_padding="*spacing_sm", + # Borders + block_border_width="0px", + block_border_width_dark="1px", + shadow_drop_lg="0 1px 4px 0 rgb(0 0 0 / 0.1)", + block_shadow="*shadow_drop_lg", + block_shadow_dark="none", + # Block Labels + block_title_text_weight="600", + block_label_text_weight="600", + block_label_text_size="*text_md", + ) diff --git a/testbed/gradio-app__gradio/gradio/themes/upload_theme.py b/testbed/gradio-app__gradio/gradio/themes/upload_theme.py new file mode 100644 index 0000000000000000000000000000000000000000..ee11e056d488579e818bc4814d6b13892e6b6e0b --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/upload_theme.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +import argparse + +from gradio.themes import ThemeClass + + +def main(): + parser = argparse.ArgumentParser(description="Upload a demo to a space") + parser.add_argument("theme", type=str, help="Theme json file") + parser.add_argument("repo_name", type=str, help="HF repo name to store the theme") + parser.add_argument( + "--org_name", + type=str, + help="The name of the org to save the space in. 
If None (the default), the username corresponding to the logged in user, or hƒ_token is used.", + ) + parser.add_argument("--version", type=str, help="Semver version") + parser.add_argument("--hf_token", type=str, help="HF Token") + parser.add_argument( + "--theme-name", + type=str, + help="Name of theme.", + ) + parser.add_argument( + "--description", + type=str, + help="Description of theme", + ) + args = parser.parse_args() + upload_theme( + args.theme, + args.repo_name, + args.org_name, + args.version, + args.hf_token, + args.theme_name, + args.description, + ) + + +def upload_theme( + theme: str, + repo_name: str, + org_name: str | None = None, + version: str | None = None, + hf_token: str | None = None, + theme_name: str | None = None, + description: str | None = None, +): + theme = ThemeClass.load(theme) + + return theme.push_to_hub( + repo_name=repo_name, + version=version, + hf_token=hf_token, + theme_name=theme_name, + description=description, + org_name=org_name, + ) diff --git a/testbed/gradio-app__gradio/gradio/themes/utils/__init__.py b/testbed/gradio-app__gradio/gradio/themes/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a3e6208634fafa416b9323f5156ac56dd7bb3700 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/utils/__init__.py @@ -0,0 +1,11 @@ +from .semver_match import ( + ThemeAsset, + get_matching_version, + get_theme_assets, +) + +__all__ = [ + "ThemeAsset", + "get_theme_assets", + "get_matching_version", +] diff --git a/testbed/gradio-app__gradio/gradio/themes/utils/colors.py b/testbed/gradio-app__gradio/gradio/themes/utils/colors.py new file mode 100644 index 0000000000000000000000000000000000000000..6b2d975bdd5245e1cd82bd172ee70a733924d0d8 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/utils/colors.py @@ -0,0 +1,359 @@ +from __future__ import annotations + + +class Color: + all = [] + + def __init__( + self, + c50: str, + c100: str, + c200: str, + c300: str, + c400: str, + 
c500: str, + c600: str, + c700: str, + c800: str, + c900: str, + c950: str, + name: str | None = None, + ): + self.c50 = c50 + self.c100 = c100 + self.c200 = c200 + self.c300 = c300 + self.c400 = c400 + self.c500 = c500 + self.c600 = c600 + self.c700 = c700 + self.c800 = c800 + self.c900 = c900 + self.c950 = c950 + self.name = name + Color.all.append(self) + + def expand(self) -> list[str]: + return [ + self.c50, + self.c100, + self.c200, + self.c300, + self.c400, + self.c500, + self.c600, + self.c700, + self.c800, + self.c900, + self.c950, + ] + + +slate = Color( + name="slate", + c50="#f8fafc", + c100="#f1f5f9", + c200="#e2e8f0", + c300="#cbd5e1", + c400="#94a3b8", + c500="#64748b", + c600="#475569", + c700="#334155", + c800="#1e293b", + c900="#0f172a", + c950="#0a0f1e", +) +gray = Color( + name="gray", + c50="#f9fafb", + c100="#f3f4f6", + c200="#e5e7eb", + c300="#d1d5db", + c400="#9ca3af", + c500="#6b7280", + c600="#4b5563", + c700="#374151", + c800="#1f2937", + c900="#111827", + c950="#0b0f19", +) +zinc = Color( + name="zinc", + c50="#fafafa", + c100="#f4f4f5", + c200="#e4e4e7", + c300="#d4d4d8", + c400="#a1a1aa", + c500="#71717a", + c600="#52525b", + c700="#3f3f46", + c800="#27272a", + c900="#18181b", + c950="#0f0f11", +) +neutral = Color( + name="neutral", + c50="#fafafa", + c100="#f5f5f5", + c200="#e5e5e5", + c300="#d4d4d4", + c400="#a3a3a3", + c500="#737373", + c600="#525252", + c700="#404040", + c800="#262626", + c900="#171717", + c950="#0f0f0f", +) +stone = Color( + name="stone", + c50="#fafaf9", + c100="#f5f5f4", + c200="#e7e5e4", + c300="#d6d3d1", + c400="#a8a29e", + c500="#78716c", + c600="#57534e", + c700="#44403c", + c800="#292524", + c900="#1c1917", + c950="#0f0e0d", +) +red = Color( + name="red", + c50="#fef2f2", + c100="#fee2e2", + c200="#fecaca", + c300="#fca5a5", + c400="#f87171", + c500="#ef4444", + c600="#dc2626", + c700="#b91c1c", + c800="#991b1b", + c900="#7f1d1d", + c950="#6c1e1e", +) +orange = Color( + name="orange", + c50="#fff7ed", + 
c100="#ffedd5", + c200="#fed7aa", + c300="#fdba74", + c400="#fb923c", + c500="#f97316", + c600="#ea580c", + c700="#c2410c", + c800="#9a3412", + c900="#7c2d12", + c950="#6c2e12", +) +amber = Color( + name="amber", + c50="#fffbeb", + c100="#fef3c7", + c200="#fde68a", + c300="#fcd34d", + c400="#fbbf24", + c500="#f59e0b", + c600="#d97706", + c700="#b45309", + c800="#92400e", + c900="#78350f", + c950="#6c370f", +) +yellow = Color( + name="yellow", + c50="#fefce8", + c100="#fef9c3", + c200="#fef08a", + c300="#fde047", + c400="#facc15", + c500="#eab308", + c600="#ca8a04", + c700="#a16207", + c800="#854d0e", + c900="#713f12", + c950="#653b12", +) +lime = Color( + name="lime", + c50="#f7fee7", + c100="#ecfccb", + c200="#d9f99d", + c300="#bef264", + c400="#a3e635", + c500="#84cc16", + c600="#65a30d", + c700="#4d7c0f", + c800="#3f6212", + c900="#365314", + c950="#2f4e14", +) +green = Color( + name="green", + c50="#f0fdf4", + c100="#dcfce7", + c200="#bbf7d0", + c300="#86efac", + c400="#4ade80", + c500="#22c55e", + c600="#16a34a", + c700="#15803d", + c800="#166534", + c900="#14532d", + c950="#134e28", +) +emerald = Color( + name="emerald", + c50="#ecfdf5", + c100="#d1fae5", + c200="#a7f3d0", + c300="#6ee7b7", + c400="#34d399", + c500="#10b981", + c600="#059669", + c700="#047857", + c800="#065f46", + c900="#064e3b", + c950="#054436", +) +teal = Color( + name="teal", + c50="#f0fdfa", + c100="#ccfbf1", + c200="#99f6e4", + c300="#5eead4", + c400="#2dd4bf", + c500="#14b8a6", + c600="#0d9488", + c700="#0f766e", + c800="#115e59", + c900="#134e4a", + c950="#12443e", +) +cyan = Color( + name="cyan", + c50="#ecfeff", + c100="#cffafe", + c200="#a5f3fc", + c300="#67e8f9", + c400="#22d3ee", + c500="#06b6d4", + c600="#0891b2", + c700="#0e7490", + c800="#155e75", + c900="#164e63", + c950="#14455c", +) +sky = Color( + name="sky", + c50="#f0f9ff", + c100="#e0f2fe", + c200="#bae6fd", + c300="#7dd3fc", + c400="#38bdf8", + c500="#0ea5e9", + c600="#0284c7", + c700="#0369a1", + c800="#075985", + 
c900="#0c4a6e", + c950="#0b4165", +) +blue = Color( + name="blue", + c50="#eff6ff", + c100="#dbeafe", + c200="#bfdbfe", + c300="#93c5fd", + c400="#60a5fa", + c500="#3b82f6", + c600="#2563eb", + c700="#1d4ed8", + c800="#1e40af", + c900="#1e3a8a", + c950="#1d3660", +) +indigo = Color( + name="indigo", + c50="#eef2ff", + c100="#e0e7ff", + c200="#c7d2fe", + c300="#a5b4fc", + c400="#818cf8", + c500="#6366f1", + c600="#4f46e5", + c700="#4338ca", + c800="#3730a3", + c900="#312e81", + c950="#2b2c5e", +) +violet = Color( + name="violet", + c50="#f5f3ff", + c100="#ede9fe", + c200="#ddd6fe", + c300="#c4b5fd", + c400="#a78bfa", + c500="#8b5cf6", + c600="#7c3aed", + c700="#6d28d9", + c800="#5b21b6", + c900="#4c1d95", + c950="#431d7f", +) +purple = Color( + name="purple", + c50="#faf5ff", + c100="#f3e8ff", + c200="#e9d5ff", + c300="#d8b4fe", + c400="#c084fc", + c500="#a855f7", + c600="#9333ea", + c700="#7e22ce", + c800="#6b21a8", + c900="#581c87", + c950="#4c1a73", +) +fuchsia = Color( + name="fuchsia", + c50="#fdf4ff", + c100="#fae8ff", + c200="#f5d0fe", + c300="#f0abfc", + c400="#e879f9", + c500="#d946ef", + c600="#c026d3", + c700="#a21caf", + c800="#86198f", + c900="#701a75", + c950="#5e1a66", +) +pink = Color( + name="pink", + c50="#fdf2f8", + c100="#fce7f3", + c200="#fbcfe8", + c300="#f9a8d4", + c400="#f472b6", + c500="#ec4899", + c600="#db2777", + c700="#be185d", + c800="#9d174d", + c900="#831843", + c950="#6e1a3d", +) +rose = Color( + name="rose", + c50="#fff1f2", + c100="#ffe4e6", + c200="#fecdd3", + c300="#fda4af", + c400="#fb7185", + c500="#f43f5e", + c600="#e11d48", + c700="#be123c", + c800="#9f1239", + c900="#881337", + c950="#771d3a", +) diff --git a/testbed/gradio-app__gradio/gradio/themes/utils/fonts.py b/testbed/gradio-app__gradio/gradio/themes/utils/fonts.py new file mode 100644 index 0000000000000000000000000000000000000000..d51dbbfdf4990358e9094cc887c47ae6cd8b0440 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/utils/fonts.py @@ -0,0 +1,50 @@ 
+from __future__ import annotations + +import json +from typing import Iterable + + +class FontEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, Font): + return { + "__gradio_font__": True, + "name": obj.name, + "class": "google" if isinstance(obj, GoogleFont) else "font", + } + # Let the base class default method raise the TypeError + return json.JSONEncoder.default(self, obj) + + +def as_font(dct): + if "__gradio_font__" in dct: + name = dct["name"] + return GoogleFont(name) if dct["class"] == "google" else Font(name) + return dct + + +class Font: + def __init__(self, name: str): + self.name = name + + def __str__(self) -> str: + return ( + self.name + if self.name in ["sans-serif", "serif", "monospace", "cursive", "fantasy"] + else f"'{self.name}'" + ) + + def stylesheet(self) -> str: + return None + + def __eq__(self, other: Font) -> bool: + return self.name == other.name and self.stylesheet() == other.stylesheet() + + +class GoogleFont(Font): + def __init__(self, name: str, weights: Iterable[int] = (400, 600)): + self.name = name + self.weights = weights + + def stylesheet(self) -> str: + return f'https://fonts.googleapis.com/css2?family={self.name.replace(" ", "+")}:wght@{";".join(str(weight) for weight in self.weights)}&display=swap' diff --git a/testbed/gradio-app__gradio/gradio/themes/utils/readme_content.py b/testbed/gradio-app__gradio/gradio/themes/utils/readme_content.py new file mode 100644 index 0000000000000000000000000000000000000000..93e72696dd8a42dbefb9b778f4e1a274d87919e8 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/utils/readme_content.py @@ -0,0 +1,18 @@ +README_CONTENT = """ +--- +tags: [gradio-theme] +title: {theme_name} +colorFrom: orange +colorTo: purple +sdk: gradio +sdk_version: {gradio_version} +app_file: app.py +pinned: false +license: apache-2.0 +--- +# {theme_name} +## Description +{description} +## Contributions +Thanks to [@{author}](https://huggingface.co/{author}) for adding this gradio 
theme! +""" diff --git a/testbed/gradio-app__gradio/gradio/themes/utils/semver_match.py b/testbed/gradio-app__gradio/gradio/themes/utils/semver_match.py new file mode 100644 index 0000000000000000000000000000000000000000..25df9265b7a0c5b6714364c1d125d85ea26d3b46 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/utils/semver_match.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from dataclasses import dataclass, field + +import huggingface_hub +import semantic_version +import semantic_version as semver + + +@dataclass +class ThemeAsset: + filename: str + version: semver.Version = field(init=False) + + def __post_init__(self): + self.version = semver.Version(self.filename.split("@")[1].replace(".json", "")) + + +def get_theme_assets(space_info: huggingface_hub.hf_api.SpaceInfo) -> list[ThemeAsset]: + if "gradio-theme" not in getattr(space_info, "tags", []): + raise ValueError(f"{space_info.id} is not a valid gradio-theme space!") + + return [ + ThemeAsset(filename.rfilename) + for filename in space_info.siblings + if filename.rfilename.startswith("themes/") + ] + + +def get_matching_version( + assets: list[ThemeAsset], expression: str | None +) -> ThemeAsset | None: + expression = expression or "*" + + # Return most recent version that matches + matching_version = semantic_version.SimpleSpec(expression).select( + [a.version for a in assets] + ) + + return next((a for a in assets if a.version == matching_version), None) diff --git a/testbed/gradio-app__gradio/gradio/themes/utils/sizes.py b/testbed/gradio-app__gradio/gradio/themes/utils/sizes.py new file mode 100644 index 0000000000000000000000000000000000000000..99ed6b1ce447d638448d4970bde5227eedd53835 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/utils/sizes.py @@ -0,0 +1,132 @@ +from __future__ import annotations + + +class Size: + all = [] + + def __init__( + self, xxs: str, xs: str, sm: str, md: str, lg: str, xl: str, xxl: str, name=None + ): + self.xxs = xxs + self.xs = xs + 
self.sm = sm + self.md = md + self.lg = lg + self.xl = xl + self.xxl = xxl + self.name = name + Size.all.append(self) + + def expand(self) -> list[str]: + return [self.xxs, self.xs, self.sm, self.md, self.lg, self.xl, self.xxl] + + +radius_none = Size( + name="radius_none", + xxs="0px", + xs="0px", + sm="0px", + md="0px", + lg="0px", + xl="0px", + xxl="0px", +) + +radius_sm = Size( + name="radius_sm", + xxs="1px", + xs="1px", + sm="2px", + md="4px", + lg="6px", + xl="8px", + xxl="12px", +) + +radius_md = Size( + name="radius_md", + xxs="1px", + xs="2px", + sm="4px", + md="6px", + lg="8px", + xl="12px", + xxl="22px", +) + +radius_lg = Size( + name="radius_lg", + xxs="2px", + xs="4px", + sm="6px", + md="8px", + lg="12px", + xl="16px", + xxl="24px", +) + +spacing_sm = Size( + name="spacing_sm", + xxs="1px", + xs="1px", + sm="2px", + md="4px", + lg="6px", + xl="9px", + xxl="12px", +) + +spacing_md = Size( + name="spacing_md", + xxs="1px", + xs="2px", + sm="4px", + md="6px", + lg="8px", + xl="10px", + xxl="16px", +) + +spacing_lg = Size( + name="spacing_lg", + xxs="2px", + xs="4px", + sm="6px", + md="8px", + lg="10px", + xl="14px", + xxl="28px", +) + +text_sm = Size( + name="text_sm", + xxs="8px", + xs="9px", + sm="11px", + md="13px", + lg="16px", + xl="20px", + xxl="24px", +) + +text_md = Size( + name="text_md", + xxs="9px", + xs="10px", + sm="12px", + md="14px", + lg="16px", + xl="22px", + xxl="26px", +) + +text_lg = Size( + name="text_lg", + xxs="10px", + xs="12px", + sm="14px", + md="16px", + lg="20px", + xl="24px", + xxl="28px", +) diff --git a/testbed/gradio-app__gradio/gradio/themes/utils/theme_dropdown.py b/testbed/gradio-app__gradio/gradio/themes/utils/theme_dropdown.py new file mode 100644 index 0000000000000000000000000000000000000000..c3d21bba7784a0b8b4bfd989cd83ccda52c4fdbc --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/themes/utils/theme_dropdown.py @@ -0,0 +1,57 @@ +import os +import pathlib + +from gradio.themes.utils import ThemeAsset + + +def 
create_theme_dropdown(): + import gradio as gr + + asset_path = pathlib.Path() / "themes" + themes = [] + for theme_asset in os.listdir(str(asset_path)): + themes.append( + (ThemeAsset(theme_asset), gr.Theme.load(str(asset_path / theme_asset))) + ) + + def make_else_if(theme_asset): + return f""" + else if (theme == '{str(theme_asset[0].version)}') {{ + var theme_css = `{theme_asset[1]._get_theme_css()}` + }}""" + + head, tail = themes[0], themes[1:] + if_statement = f""" + if (theme == "{str(head[0].version)}") {{ + var theme_css = `{head[1]._get_theme_css()}` + }} {" ".join(make_else_if(t) for t in tail)} + """ + + latest_to_oldest = sorted([t[0] for t in themes], key=lambda asset: asset.version)[ + ::-1 + ] + latest_to_oldest = [str(t.version) for t in latest_to_oldest] + + component = gr.Dropdown( + choices=latest_to_oldest, + value=latest_to_oldest[0], + render=False, + label="Select Version", + ).style(container=False) + + return ( + component, + f""" + (theme) => {{ + if (!document.querySelector('.theme-css')) {{ + var theme_elem = document.createElement('style'); + theme_elem.classList.add('theme-css'); + document.head.appendChild(theme_elem); + }} else {{ + var theme_elem = document.querySelector('.theme-css'); + }} + {if_statement} + theme_elem.innerHTML = theme_css; + }} + """, + ) diff --git a/testbed/gradio-app__gradio/gradio/tunneling.py b/testbed/gradio-app__gradio/gradio/tunneling.py new file mode 100644 index 0000000000000000000000000000000000000000..7249ff57c7a0ef4610fcf0baf9976629267fa784 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/tunneling.py @@ -0,0 +1,103 @@ +import atexit +import os +import platform +import re +import stat +import subprocess +from pathlib import Path +from typing import List + +import requests + +VERSION = "0.2" +CURRENT_TUNNELS: List["Tunnel"] = [] + +machine = platform.machine() +if machine == "x86_64": + machine = "amd64" + +BINARY_REMOTE_NAME = f"frpc_{platform.system().lower()}_{machine.lower()}" +EXTENSION = 
".exe" if os.name == "nt" else "" +BINARY_URL = f"https://cdn-media.huggingface.co/frpc-gradio-{VERSION}/{BINARY_REMOTE_NAME}{EXTENSION}" + +BINARY_FILENAME = f"{BINARY_REMOTE_NAME}_v{VERSION}" +BINARY_FOLDER = Path(__file__).parent +BINARY_PATH = f"{BINARY_FOLDER / BINARY_FILENAME}" + + +class Tunnel: + def __init__(self, remote_host, remote_port, local_host, local_port, share_token): + self.proc = None + self.url = None + self.remote_host = remote_host + self.remote_port = remote_port + self.local_host = local_host + self.local_port = local_port + self.share_token = share_token + + @staticmethod + def download_binary(): + if not Path(BINARY_PATH).exists(): + resp = requests.get(BINARY_URL) + + if resp.status_code == 403: + raise OSError( + f"Cannot set up a share link as this platform is incompatible. Please " + f"create a GitHub issue with information about your platform: {platform.uname()}" + ) + + resp.raise_for_status() + + # Save file data to local copy + with open(BINARY_PATH, "wb") as file: + file.write(resp.content) + st = os.stat(BINARY_PATH) + os.chmod(BINARY_PATH, st.st_mode | stat.S_IEXEC) + + def start_tunnel(self) -> str: + self.download_binary() + self.url = self._start_tunnel(BINARY_PATH) + return self.url + + def kill(self): + if self.proc is not None: + print(f"Killing tunnel {self.local_host}:{self.local_port} <> {self.url}") + self.proc.terminate() + self.proc = None + + def _start_tunnel(self, binary: str) -> str: + CURRENT_TUNNELS.append(self) + command = [ + binary, + "http", + "-n", + self.share_token, + "-l", + str(self.local_port), + "-i", + self.local_host, + "--uc", + "--sd", + "random", + "--ue", + "--server_addr", + f"{self.remote_host}:{self.remote_port}", + "--disable_log_color", + ] + self.proc = subprocess.Popen( + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + atexit.register(self.kill) + url = "" + while url == "": + if self.proc.stdout is None: + continue + line = self.proc.stdout.readline() + line = 
line.decode("utf-8") + if "start proxy success" in line: + result = re.search("start proxy success: (.+)\n", line) + if result is None: + raise ValueError("Could not create share URL") + else: + url = result.group(1) + return url diff --git a/testbed/gradio-app__gradio/gradio/utils.py b/testbed/gradio-app__gradio/gradio/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..3c89ddeae7e21ba474eb99925575d7842584f514 --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/utils.py @@ -0,0 +1,978 @@ +""" Handy utility functions. """ + +from __future__ import annotations + +import asyncio +import copy +import functools +import importlib +import inspect +import json +import json.decoder +import os +import pkgutil +import random +import re +import threading +import time +import traceback +import typing +import warnings +from abc import ABC, abstractmethod +from contextlib import contextmanager +from io import BytesIO +from numbers import Number +from pathlib import Path +from types import GeneratorType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterator, + Optional, + TypeVar, +) + +import anyio +import matplotlib +import requests +from gradio_client.serializing import Serializable +from typing_extensions import ParamSpec + +import gradio +from gradio.context import Context +from gradio.strings import en + +if TYPE_CHECKING: # Only import for type checking (is False at runtime). 
+ from gradio.blocks import Block, BlockContext, Blocks + from gradio.components import Component + from gradio.routes import App, Request + +JSON_PATH = os.path.join(os.path.dirname(gradio.__file__), "launches.json") + +P = ParamSpec("P") +T = TypeVar("T") + + +def get_package_version() -> str: + try: + package_json_data = ( + pkgutil.get_data(__name__, "package.json").decode("utf-8").strip() # type: ignore + ) + package_data = json.loads(package_json_data) + version = package_data.get("version", "") + return version + except Exception: + return "" + + +def safe_get_lock() -> asyncio.Lock: + """Get asyncio.Lock() without fear of getting an Exception. + + Needed because in reload mode we import the Blocks object outside + the main thread. + """ + try: + asyncio.get_event_loop() + return asyncio.Lock() + except RuntimeError: + return None # type: ignore + + +class BaseReloader(ABC): + @property + @abstractmethod + def running_app(self) -> App: + pass + + def queue_changed(self, demo: Blocks): + return ( + hasattr(self.running_app.blocks, "_queue") and not hasattr(demo, "_queue") + ) or ( + not hasattr(self.running_app.blocks, "_queue") and hasattr(demo, "_queue") + ) + + def swap_blocks(self, demo: Blocks): + assert self.running_app.blocks + # Copy over the blocks to get new components and events but + # not a new queue + if hasattr(self.running_app.blocks, "_queue"): + self.running_app.blocks._queue.blocks_dependencies = demo.dependencies + demo._queue = self.running_app.blocks._queue + self.running_app.blocks = demo + + +class SourceFileReloader(BaseReloader): + def __init__( + self, + app: App, + watch_dirs: list[str], + watch_file: str, + stop_event: threading.Event, + change_event: threading.Event, + demo_name: str = "demo", + ) -> None: + super().__init__() + self.app = app + self.watch_dirs = watch_dirs + self.watch_file = watch_file + self.stop_event = stop_event + self.change_event = change_event + self.demo_name = demo_name + + @property + def 
running_app(self) -> App: + return self.app + + def should_watch(self) -> bool: + return not self.stop_event.is_set() + + def stop(self) -> None: + self.stop_event.set() + + def alert_change(self): + self.change_event.set() + + def swap_blocks(self, demo: Blocks): + super().swap_blocks(demo) + self.alert_change() + + +def watchfn(reloader: SourceFileReloader): + """Watch python files in a given module. + + get_changes is taken from uvicorn's default file watcher. + """ + + # The thread running watchfn will be the thread reloading + # the app. So we need to modify this thread_data attr here + # so that subsequent calls to reload don't launch the app + from gradio.reload import reload_thread + + reload_thread.running_reload = True + + def get_changes() -> Path | None: + for file in iter_py_files(): + try: + mtime = file.stat().st_mtime + except OSError: # pragma: nocover + continue + + old_time = mtimes.get(file) + if old_time is None: + mtimes[file] = mtime + continue + elif mtime > old_time: + return file + return None + + def iter_py_files() -> Iterator[Path]: + for reload_dir in reload_dirs: + for path in list(reload_dir.rglob("*.py")): + yield path.resolve() + + module = None + reload_dirs = [Path(dir_) for dir_ in reloader.watch_dirs] + mtimes = {} + while reloader.should_watch(): + import sys + + changed = get_changes() + if changed: + print(f"Changes detected in: {changed}") + # To simulate a fresh reload, delete all module references from sys.modules + # for the modules in the package the change came from. 
+ dir_ = next(d for d in reload_dirs if is_in_or_equal(changed, d)) + modules = list(sys.modules) + for k in modules: + v = sys.modules[k] + sourcefile = getattr(v, "__file__", None) + # Do not reload `reload.py` to keep thread data + if ( + sourcefile + and dir_ == Path(inspect.getfile(gradio)).parent + and sourcefile.endswith("reload.py") + ): + continue + if sourcefile and is_in_or_equal(sourcefile, dir_): + del sys.modules[k] + try: + module = importlib.import_module(reloader.watch_file) + module = importlib.reload(module) + except Exception as e: + print( + f"Reloading {reloader.watch_file} failed with the following exception: " + ) + traceback.print_exception(None, value=e, tb=None) + mtimes = {} + continue + + demo = getattr(module, reloader.demo_name) + if reloader.queue_changed(demo): + print( + "Reloading failed. The new demo has a queue and the old one doesn't (or vice versa). " + "Please launch your demo again" + ) + else: + reloader.swap_blocks(demo) + mtimes = {} + + +def colab_check() -> bool: + """ + Check if interface is launching from Google Colab + :return is_colab (bool): True or False + """ + is_colab = False + try: # Check if running interactively using ipython. + from IPython.core.getipython import get_ipython + + from_ipynb = get_ipython() + if "google.colab" in str(from_ipynb): + is_colab = True + except (ImportError, NameError): + pass + return is_colab + + +def kaggle_check() -> bool: + return bool( + os.environ.get("KAGGLE_KERNEL_RUN_TYPE") or os.environ.get("GFOOTBALL_DATA_DIR") + ) + + +def sagemaker_check() -> bool: + try: + import boto3 # type: ignore + + client = boto3.client("sts") + response = client.get_caller_identity() + return "sagemaker" in response["Arn"].lower() + except Exception: + return False + + +def ipython_check() -> bool: + """ + Check if interface is launching from iPython (not colab) + :return is_ipython (bool): True or False + """ + is_ipython = False + try: # Check if running interactively using ipython. 
+ from IPython.core.getipython import get_ipython + + if get_ipython() is not None: + is_ipython = True + except (ImportError, NameError): + pass + return is_ipython + + +def get_space() -> str | None: + if os.getenv("SYSTEM") == "spaces": + return os.getenv("SPACE_ID") + return None + + +def is_zero_gpu_space() -> bool: + return os.getenv("SPACES_ZERO_GPU") == "true" + + +def readme_to_html(article: str) -> str: + try: + response = requests.get(article, timeout=3) + if response.status_code == requests.codes.ok: # pylint: disable=no-member + article = response.text + except requests.exceptions.RequestException: + pass + return article + + +def show_tip(interface: gradio.Blocks) -> None: + if interface.show_tips and random.random() < 1.5: + tip: str = random.choice(en["TIPS"]) + print(f"Tip: {tip}") + + +def launch_counter() -> None: + try: + if not os.path.exists(JSON_PATH): + launches = {"launches": 1} + with open(JSON_PATH, "w+") as j: + json.dump(launches, j) + else: + with open(JSON_PATH) as j: + launches = json.load(j) + launches["launches"] += 1 + if launches["launches"] in [25, 50, 150, 500, 1000]: + print(en["BETA_INVITE"]) + with open(JSON_PATH, "w") as j: + j.write(json.dumps(launches)) + except Exception: + pass + + +def get_default_args(func: Callable) -> list[Any]: + signature = inspect.signature(func) + return [ + v.default if v.default is not inspect.Parameter.empty else None + for v in signature.parameters.values() + ] + + +def assert_configs_are_equivalent_besides_ids( + config1: dict, config2: dict, root_keys: tuple = ("mode",) +): + """Allows you to test if two different Blocks configs produce the same demo. + + Parameters: + config1 (dict): nested dict with config from the first Blocks instance + config2 (dict): nested dict with config from the second Blocks instance + root_keys (Tuple): an interable consisting of which keys to test for equivalence at + the root level of the config. 
By default, only "mode" is tested, + so keys like "version" are ignored. + """ + config1 = copy.deepcopy(config1) + config2 = copy.deepcopy(config2) + config1 = json.loads(json.dumps(config1)) # convert tuples to lists + config2 = json.loads(json.dumps(config2)) + + for key in root_keys: + assert config1[key] == config2[key], f"Configs have different: {key}" + + assert len(config1["components"]) == len( + config2["components"] + ), "# of components are different" + + def assert_same_components(config1_id, config2_id): + c1 = list(filter(lambda c: c["id"] == config1_id, config1["components"])) + if len(c1) == 0: + raise ValueError(f"Could not find component with id {config1_id}") + c1 = c1[0] + c2 = list(filter(lambda c: c["id"] == config2_id, config2["components"])) + if len(c2) == 0: + raise ValueError(f"Could not find component with id {config2_id}") + c2 = c2[0] + c1 = copy.deepcopy(c1) + c1.pop("id") + c2 = copy.deepcopy(c2) + c2.pop("id") + assert c1 == c2, f"{c1} does not match {c2}" + + def same_children_recursive(children1, chidren2): + for child1, child2 in zip(children1, chidren2): + assert_same_components(child1["id"], child2["id"]) + if "children" in child1 or "children" in child2: + same_children_recursive(child1["children"], child2["children"]) + + children1 = config1["layout"]["children"] + children2 = config2["layout"]["children"] + same_children_recursive(children1, children2) + + for d1, d2 in zip(config1["dependencies"], config2["dependencies"]): + for t1, t2 in zip(d1.pop("targets"), d2.pop("targets")): + assert_same_components(t1[0], t2[0]) + for i1, i2 in zip(d1.pop("inputs"), d2.pop("inputs")): + assert_same_components(i1, i2) + for o1, o2 in zip(d1.pop("outputs"), d2.pop("outputs")): + assert_same_components(o1, o2) + + assert d1 == d2, f"{d1} does not match {d2}" + + return True + + +def format_ner_list(input_string: str, ner_groups: list[dict[str, str | int]]): + if len(ner_groups) == 0: + return [(input_string, None)] + + output = [] + 
end = 0 + prev_end = 0 + + for group in ner_groups: + entity, start, end = group["entity_group"], group["start"], group["end"] + output.append((input_string[prev_end:start], None)) + output.append((input_string[start:end], entity)) + prev_end = end + + output.append((input_string[end:], None)) + return output + + +def delete_none(_dict: dict, skip_value: bool = False) -> dict: + """ + Delete keys whose values are None from a dictionary + """ + for key, value in list(_dict.items()): + if skip_value and key == "value": + continue + elif value is None: + del _dict[key] + return _dict + + +def resolve_singleton(_list: list[Any] | Any) -> Any: + if len(_list) == 1: + return _list[0] + else: + return _list + + +def component_or_layout_class(cls_name: str) -> type[Component] | type[BlockContext]: + """ + Returns the component, template, or layout class with the given class name, or + raises a ValueError if not found. + + Parameters: + cls_name (str): lower-case string class name of a component + Returns: + cls: the component class + """ + import gradio.blocks + import gradio.components + import gradio.layouts + import gradio.templates + + components = [ + (name, cls) + for name, cls in gradio.components.__dict__.items() + if isinstance(cls, type) + ] + templates = [ + (name, cls) + for name, cls in gradio.templates.__dict__.items() + if isinstance(cls, type) + ] + layouts = [ + (name, cls) + for name, cls in gradio.layouts.__dict__.items() + if isinstance(cls, type) + ] + for name, cls in components + templates + layouts: + if name.lower() == cls_name.replace("_", "") and ( + issubclass(cls, gradio.components.Component) + or issubclass(cls, gradio.blocks.BlockContext) + ): + return cls + raise ValueError(f"No such component or layout: {cls_name}") + + +def run_coro_in_background(func: Callable, *args, **kwargs): + """ + Runs coroutines in background. + + Warning, be careful to not use this function in other than FastAPI scope, because the event_loop has not started yet. 
+ You can use it in any scope reached by FastAPI app. + + correct scope examples: endpoints in routes, Blocks.process_api + incorrect scope examples: Blocks.launch + + Use startup_events in routes.py if you need to run a coro in background in Blocks.launch(). + + + Example: + utils.run_coro_in_background(fn, *args, **kwargs) + + Args: + func: + *args: + **kwargs: + + Returns: + + """ + event_loop = asyncio.get_event_loop() + return event_loop.create_task(func(*args, **kwargs)) + + +def run_sync_iterator_async(iterator): + """Helper for yielding StopAsyncIteration from sync iterators.""" + try: + return next(iterator) + except StopIteration: + # raise a ValueError here because co-routines can't raise StopIteration themselves + raise StopAsyncIteration() from None + + +class SyncToAsyncIterator: + """Treat a synchronous iterator as async one.""" + + def __init__(self, iterator, limiter) -> None: + self.iterator = iterator + self.limiter = limiter + + def __aiter__(self): + return self + + async def __anext__(self): + return await anyio.to_thread.run_sync( + run_sync_iterator_async, self.iterator, limiter=self.limiter + ) + + +async def async_iteration(iterator): + # anext not introduced until 3.10 :( + return await iterator.__anext__() + + +@contextmanager +def set_directory(path: Path | str): + """Context manager that sets the working directory to the given path.""" + origin = Path().absolute() + try: + os.chdir(path) + yield + finally: + os.chdir(origin) + + +def sanitize_value_for_csv(value: str | Number) -> str | Number: + """ + Sanitizes a value that is being written to a CSV file to prevent CSV injection attacks. 
+ Reference: https://owasp.org/www-community/attacks/CSV_Injection + """ + if isinstance(value, Number): + return value + unsafe_prefixes = ["=", "+", "-", "@", "\t", "\n"] + unsafe_sequences = [",=", ",+", ",-", ",@", ",\t", ",\n"] + if any(value.startswith(prefix) for prefix in unsafe_prefixes) or any( + sequence in value for sequence in unsafe_sequences + ): + value = f"'{value}" + return value + + +def sanitize_list_for_csv(values: list[Any]) -> list[Any]: + """ + Sanitizes a list of values (or a list of list of values) that is being written to a + CSV file to prevent CSV injection attacks. + """ + sanitized_values = [] + for value in values: + if isinstance(value, list): + sanitized_value = [sanitize_value_for_csv(v) for v in value] + sanitized_values.append(sanitized_value) + else: + sanitized_value = sanitize_value_for_csv(value) + sanitized_values.append(sanitized_value) + return sanitized_values + + +def append_unique_suffix(name: str, list_of_names: list[str]): + """Appends a numerical suffix to `name` so that it does not appear in `list_of_names`.""" + set_of_names: set[str] = set(list_of_names) # for O(1) lookup + if name not in set_of_names: + return name + else: + suffix_counter = 1 + new_name = f"{name}_{suffix_counter}" + while new_name in set_of_names: + suffix_counter += 1 + new_name = f"{name}_{suffix_counter}" + return new_name + + +def validate_url(possible_url: str) -> bool: + headers = {"User-Agent": "gradio (https://gradio.app/; team@gradio.app)"} + try: + head_request = requests.head(possible_url, headers=headers) + # some URLs, such as AWS S3 presigned URLs, return a 405 or a 403 for HEAD requests + if head_request.status_code == 405 or head_request.status_code == 403: + return requests.get(possible_url, headers=headers).ok + return head_request.ok + except Exception: + return False + + +def is_update(val): + return isinstance(val, dict) and "update" in val.get("__type__", "") + + +def get_continuous_fn(fn: Callable, every: float) -> 
Callable: + def continuous_fn(*args): + while True: + output = fn(*args) + if isinstance(output, GeneratorType): + yield from output + else: + yield output + time.sleep(every) + + return continuous_fn + + +def function_wrapper( + f, before_fn=None, before_args=None, after_fn=None, after_args=None +): + before_args = [] if before_args is None else before_args + after_args = [] if after_args is None else after_args + if inspect.isasyncgenfunction(f): + + @functools.wraps(f) + async def asyncgen_wrapper(*args, **kwargs): + if before_fn: + before_fn(*before_args) + async for response in f(*args, **kwargs): + yield response + if after_fn: + after_fn(*after_args) + + return asyncgen_wrapper + + elif asyncio.iscoroutinefunction(f): + + @functools.wraps(f) + async def async_wrapper(*args, **kwargs): + if before_fn: + before_fn(*before_args) + response = await f(*args, **kwargs) + if after_fn: + after_fn(*after_args) + return response + + return async_wrapper + + elif inspect.isgeneratorfunction(f): + + @functools.wraps(f) + def gen_wrapper(*args, **kwargs): + if before_fn: + before_fn(*before_args) + yield from f(*args, **kwargs) + if after_fn: + after_fn(*after_args) + + return gen_wrapper + + else: + + @functools.wraps(f) + def wrapper(*args, **kwargs): + if before_fn: + before_fn(*before_args) + response = f(*args, **kwargs) + if after_fn: + after_fn(*after_args) + return response + + return wrapper + + +def get_function_with_locals( + fn: Callable, + blocks: Blocks, + event_id: str | None, + in_event_listener: bool, + request: Request | None, +): + def before_fn(blocks, event_id): + from gradio.context import LocalContext + + LocalContext.blocks.set(blocks) + LocalContext.in_event_listener.set(in_event_listener) + LocalContext.event_id.set(event_id) + LocalContext.request.set(request) + + def after_fn(): + from gradio.context import LocalContext + + LocalContext.in_event_listener.set(False) + LocalContext.request.set(None) + + return function_wrapper( + fn, 
before_fn=before_fn, before_args=(blocks, event_id), after_fn=after_fn + ) + + +async def cancel_tasks(task_ids: set[str]): + matching_tasks = [ + task for task in asyncio.all_tasks() if task.get_name() in task_ids + ] + for task in matching_tasks: + task.cancel() + await asyncio.gather(*matching_tasks, return_exceptions=True) + + +def set_task_name(task, session_hash: str, fn_index: int, batch: bool): + if not batch: + task.set_name(f"{session_hash}_{fn_index}") + + +def get_cancel_function( + dependencies: list[dict[str, Any]] +) -> tuple[Callable, list[int]]: + fn_to_comp = {} + for dep in dependencies: + if Context.root_block: + fn_index = next( + i for i, d in enumerate(Context.root_block.dependencies) if d == dep + ) + fn_to_comp[fn_index] = [ + Context.root_block.blocks[o] for o in dep["outputs"] + ] + + async def cancel(session_hash: str) -> None: + task_ids = {f"{session_hash}_{fn}" for fn in fn_to_comp} + await cancel_tasks(task_ids) + + return ( + cancel, + list(fn_to_comp.keys()), + ) + + +def get_type_hints(fn): + # Importing gradio with the canonical abbreviation. Used in typing._eval_type. + import gradio as gr # noqa: F401 + from gradio import OAuthProfile, Request # noqa: F401 + + if inspect.isfunction(fn) or inspect.ismethod(fn): + pass + elif callable(fn): + fn = fn.__call__ + else: + return {} + + try: + return typing.get_type_hints(fn) + except TypeError: + # On Python 3.9 or earlier, get_type_hints throws a TypeError if the function + # has a type annotation that include "|". We resort to parsing the signature + # manually using inspect.signature. 
+ type_hints = {} + sig = inspect.signature(fn) + for name, param in sig.parameters.items(): + if param.annotation is inspect.Parameter.empty: + continue + if param.annotation == "gr.OAuthProfile | None": + # Special case: we want to inject the OAuthProfile value even on Python 3.9 + type_hints[name] = Optional[OAuthProfile] + if "|" in str(param.annotation): + continue + # To convert the string annotation to a class, we use the + # internal typing._eval_type function. This is not ideal, but + # it's the only way to do it without eval-ing the string. + # Since the API is internal, it may change in the future. + try: + type_hints[name] = typing._eval_type( # type: ignore + typing.ForwardRef(param.annotation), globals(), locals() + ) + except (NameError, TypeError): + pass + return type_hints + + +def is_special_typed_parameter(name, parameter_types): + from gradio.helpers import EventData + from gradio.oauth import OAuthProfile + from gradio.routes import Request + + """Checks if parameter has a type hint designating it as a gr.Request, gr.EventData or gr.OAuthProfile.""" + hint = parameter_types.get(name) + if not hint: + return False + is_request = hint == Request + is_oauth_arg = hint in (OAuthProfile, Optional[OAuthProfile]) + is_event_data = inspect.isclass(hint) and issubclass(hint, EventData) + return is_request or is_event_data or is_oauth_arg + + +def check_function_inputs_match(fn: Callable, inputs: list, inputs_as_dict: bool): + """ + Checks if the input component set matches the function + Returns: None if valid, a string error message if mismatch + """ + + signature = inspect.signature(fn) + parameter_types = get_type_hints(fn) + min_args = 0 + max_args = 0 + infinity = -1 + for name, param in signature.parameters.items(): + has_default = param.default != param.empty + if param.kind in [param.POSITIONAL_ONLY, param.POSITIONAL_OR_KEYWORD]: + if not is_special_typed_parameter(name, parameter_types): + if not has_default: + min_args += 1 + max_args += 1 + 
elif param.kind == param.VAR_POSITIONAL: + max_args = infinity + elif param.kind == param.KEYWORD_ONLY and not has_default: + return f"Keyword-only args must have default values for function {fn}" + arg_count = 1 if inputs_as_dict else len(inputs) + if min_args == max_args and max_args != arg_count: + warnings.warn( + f"Expected {max_args} arguments for function {fn}, received {arg_count}." + ) + if arg_count < min_args: + warnings.warn( + f"Expected at least {min_args} arguments for function {fn}, received {arg_count}." + ) + if max_args != infinity and arg_count > max_args: + warnings.warn( + f"Expected maximum {max_args} arguments for function {fn}, received {arg_count}." + ) + + +def concurrency_count_warning(queue: Callable[P, T]) -> Callable[P, T]: + @functools.wraps(queue) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: + _self, *positional = args + if is_zero_gpu_space() and ( + len(positional) >= 1 or "concurrency_count" in kwargs + ): + warnings.warn( + "Queue concurrency_count on ZeroGPU Spaces cannot be overridden " + "and is always equal to Block's max_threads. 
" + "Consider setting max_threads value on the Block instead" + ) + return queue(*args, **kwargs) + + return wrapper + + +class TupleNoPrint(tuple): + # To remove printing function return in notebook + def __repr__(self): + return "" + + def __str__(self): + return "" + + +class MatplotlibBackendMananger: + def __enter__(self): + self._original_backend = matplotlib.get_backend() + matplotlib.use("agg") + + def __exit__(self, exc_type, exc_val, exc_tb): + matplotlib.use(self._original_backend) + + +def tex2svg(formula, *args): + with MatplotlibBackendMananger(): + import matplotlib.pyplot as plt + + fontsize = 20 + dpi = 300 + plt.rc("mathtext", fontset="cm") + fig = plt.figure(figsize=(0.01, 0.01)) + fig.text(0, 0, rf"${formula}$", fontsize=fontsize) + output = BytesIO() + fig.savefig( + output, + dpi=dpi, + transparent=True, + format="svg", + bbox_inches="tight", + pad_inches=0.0, + ) + plt.close(fig) + output.seek(0) + xml_code = output.read().decode("utf-8") + svg_start = xml_code.index(".*<\/metadata>", "", svg_code, flags=re.DOTALL) + svg_code = re.sub(r' width="[^"]+"', "", svg_code) + height_match = re.search(r'height="([\d.]+)pt"', svg_code) + if height_match: + height = float(height_match.group(1)) + new_height = height / fontsize # conversion from pt to em + svg_code = re.sub( + r'height="[\d.]+pt"', f'height="{new_height}em"', svg_code + ) + copy_code = f"{formula}" + return f"{copy_code}{svg_code}" + + +def abspath(path: str | Path) -> Path: + """Returns absolute path of a str or Path path, but does not resolve symlinks.""" + path = Path(path) + + if path.is_absolute(): + return path + + # recursively check if there is a symlink within the path + is_symlink = path.is_symlink() or any( + parent.is_symlink() for parent in path.parents + ) + + if is_symlink or path == path.resolve(): # in case path couldn't be resolved + return Path.cwd() / path + else: + return path.resolve() + + +def is_in_or_equal(path_1: str | Path, path_2: str | Path): + """ + True if 
path_1 is a descendant (i.e. located within) path_2 or if the paths are the + same, returns False otherwise. + Parameters: + path_1: str or Path (should be a file) + path_2: str or Path (can be a file or directory) + """ + path_1, path_2 = abspath(path_1), abspath(path_2) + try: + if str(path_1.relative_to(path_2)).startswith(".."): # prevent path traversal + return False + except ValueError: + return False + return True + + +def get_serializer_name(block: Block) -> str | None: + if not hasattr(block, "serialize"): + return None + + def get_class_that_defined_method(meth: Callable): + # Adapted from: https://stackoverflow.com/a/25959545/5209347 + if isinstance(meth, functools.partial): + return get_class_that_defined_method(meth.func) + if inspect.ismethod(meth) or ( + inspect.isbuiltin(meth) + and getattr(meth, "__self__", None) is not None + and getattr(meth.__self__, "__class__", None) + ): + for cls in inspect.getmro(meth.__self__.__class__): + # Find the first serializer defined in gradio_client that + if issubclass(cls, Serializable) and "gradio_client" in cls.__module__: + return cls + if meth.__name__ in cls.__dict__: + return cls + meth = getattr(meth, "__func__", meth) # fallback to __qualname__ parsing + if inspect.isfunction(meth): + cls = getattr( + inspect.getmodule(meth), + meth.__qualname__.split(".", 1)[0].rsplit(".", 1)[0], + None, + ) + if isinstance(cls, type): + return cls + return getattr(meth, "__objclass__", None) + + cls = get_class_that_defined_method(block.serialize) # type: ignore + if cls: + return cls.__name__ + + +HTML_TAG_RE = re.compile("<.*?>") + + +def remove_html_tags(raw_html: str | None) -> str: + return re.sub(HTML_TAG_RE, "", raw_html or "") + + +def find_user_stack_level() -> int: + """ + Find the first stack frame not inside Gradio. 
+ """ + frame = inspect.currentframe() + n = 0 + while frame: + fname = inspect.getfile(frame) + if "/gradio/" not in fname.replace(os.sep, "/"): + break + frame = frame.f_back + n += 1 + return n diff --git a/testbed/gradio-app__gradio/gradio/wasm_utils.py b/testbed/gradio-app__gradio/gradio/wasm_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..205892bdb3531f1c28c4ecb782b860de7f7a325a --- /dev/null +++ b/testbed/gradio-app__gradio/gradio/wasm_utils.py @@ -0,0 +1,24 @@ +import sys + +# See https://pyodide.org/en/stable/usage/faq.html#how-to-detect-that-code-is-run-with-pyodide +IS_WASM = sys.platform == "emscripten" + + +class WasmUnsupportedError(Exception): + pass + + +app = None + + +# `register_app` and `get_registered_app` are used +# for the Wasm worker to get a reference to +# the Gradio's FastAPI app instance (`app`). +def register_app(_app): + global app + app = _app + + +def get_registered_app(): + global app + return app diff --git a/testbed/gradio-app__gradio/guides/01_getting-started/01_quickstart.md b/testbed/gradio-app__gradio/guides/01_getting-started/01_quickstart.md new file mode 100644 index 0000000000000000000000000000000000000000..9d584297cf69e9178c4eb353e1bafb0f77842dd3 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/01_getting-started/01_quickstart.md @@ -0,0 +1,136 @@ +# Quickstart + +**Prerequisite**: Gradio requires Python 3.8 or higher, that's all! + +## What Does Gradio Do? + +One of the _best ways to share_ your machine learning model, API, or data science workflow with others is to create an **interactive app** that allows your users or colleagues to try out the demo in their browsers. + +Gradio allows you to **build demos and share them, all in Python.** And usually in just a few lines of code! So let's get started. + +## Hello, World + +To get Gradio running with a simple "Hello, World" example, follow these three steps: + +1\. Install Gradio using pip: + +```bash +pip install gradio +``` + +2\. 
Run the code below as a Python script or in a Jupyter Notebook (or [Google Colab](https://colab.research.google.com/drive/18ODkJvyxHutTN0P5APWyGFO_xwNcgHDZ?usp=sharing)): + +$code_hello_world + +We shorten the imported name to `gr` for better readability of code using Gradio. This is a widely adopted convention that you should follow so that anyone working with your code can easily understand it. + +3\. The demo below will appear automatically within the Jupyter Notebook, or pop in a browser on [http://localhost:7860](http://localhost:7860) if running from a script: + +$demo_hello_world + +When developing locally, if you want to run the code as a Python script, you can use the Gradio CLI to launch the application **in reload mode**, which will provide seamless and fast development. Learn more about reloading in the [Auto-Reloading Guide](https://gradio.app/developing-faster-with-reload-mode/). + +```bash +gradio app.py +``` + +Note: you can also do `python app.py`, but it won't provide the automatic reload mechanism. + +## The `Interface` Class + +You'll notice that in order to make the demo, we created a `gr.Interface`. This `Interface` class can wrap any Python function with a user interface. In the example above, we saw a simple text-based function, but the function could be anything from music generator to a tax calculator to the prediction function of a pretrained machine learning model. + +The core `Interface` class is initialized with three required parameters: + +- `fn`: the function to wrap a UI around +- `inputs`: which component(s) to use for the input (e.g. `"text"`, `"image"` or `"audio"`) +- `outputs`: which component(s) to use for the output (e.g. `"text"`, `"image"` or `"label"`) + +Let's take a closer look at these components used to provide input and output. + +## Components Attributes + +We saw some simple `Textbox` components in the previous examples, but what if you want to change how the UI components look or behave? 
+ +Let's say you want to customize the input text field — for example, you wanted it to be larger and have a text placeholder. If we use the actual class for `Textbox` instead of using the string shortcut, you have access to much more customizability through component attributes. + +$code_hello_world_2 +$demo_hello_world_2 + +## Multiple Input and Output Components + +Suppose you had a more complex function, with multiple inputs and outputs. In the example below, we define a function that takes a string, boolean, and number, and returns a string and number. Take a look how you pass a list of input and output components. + +$code_hello_world_3 +$demo_hello_world_3 + +You simply wrap the components in a list. Each component in the `inputs` list corresponds to one of the parameters of the function, in order. Each component in the `outputs` list corresponds to one of the values returned by the function, again in order. + +## An Image Example + +Gradio supports many types of components, such as `Image`, `DataFrame`, `Video`, or `Label`. Let's try an image-to-image function to get a feel for these! + +$code_sepia_filter +$demo_sepia_filter + +When using the `Image` component as input, your function will receive a NumPy array with the shape `(height, width, 3)`, where the last dimension represents the RGB values. We'll return an image as well in the form of a NumPy array. + +You can also set the datatype used by the component with the `type=` keyword argument. For example, if you wanted your function to take a file path to an image instead of a NumPy array, the input `Image` component could be written as: + +```python +gr.Image(type="filepath", shape=...) +``` + +Also note that our input `Image` component comes with an edit button 🖉, which allows for cropping and zooming into images. Manipulating images in this way can help reveal biases or hidden flaws in a machine learning model! 
+ +You can read more about the many components and how to use them in the [Gradio docs](https://gradio.app/docs). + +## Chatbots + +Gradio includes a high-level class, `gr.ChatInterface`, which is similar to `gr.Interface`, but is specifically designed for chatbot UIs. The `gr.ChatInterface` class also wraps a function but this function must have a specific signature. The function should take two arguments: `message` and then `history` (the arguments can be named anything, but must be in this order) + +- `message`: a `str` representing the user's input +- `history`: a `list` of `list` representing the conversations up until that point. Each inner list consists of two `str` representing a pair: `[user input, bot response]`. + +Your function should return a single string response, which is the bot's response to the particular user input `message`. + +Other than that, `gr.ChatInterface` has no required parameters (though several are available for customization of the UI). + +Here's a toy example: + +$code_chatinterface_random_response +$demo_chatinterface_random_response + +You can [read more about `gr.ChatInterface` here](https://gradio.app/guides/creating-a-chatbot-fast). + +## Blocks: More Flexibility and Control + +Gradio offers two approaches to build apps: + +1\. **Interface** and **ChatInterface**, which provide a high-level abstraction for creating demos that we've been discussing so far. + +2\. **Blocks**, a low-level API for designing web apps with more flexible layouts and data flows. Blocks allows you to do things like feature multiple data flows and demos, control where components appear on the page, handle complex data flows (e.g. outputs can serve as inputs to other functions), and update properties/visibility of components based on user interaction — still all in Python. If this customizability is what you need, try `Blocks` instead! + +## Hello, Blocks + +Let's take a look at a simple example. Note how the API here differs from `Interface`. 
+ +$code_hello_blocks +$demo_hello_blocks + +Things to note: + +- `Blocks` are made with a `with` clause, and any component created inside this clause is automatically added to the app. +- Components appear vertically in the app in the order they are created. (Later we will cover customizing layouts!) +- A `Button` was created, and then a `click` event-listener was added to this button. The API for this should look familiar! Like an `Interface`, the `click` method takes a Python function, input components, and output components. + +## More Complexity + +Here's an app to give you a taste of what's possible with `Blocks`: + +$code_blocks_flipper +$demo_blocks_flipper + +A lot more going on here! We'll cover how to create complex `Blocks` apps like this in the [building with blocks](https://gradio.app/blocks-and-event-listeners) section for you. + +Congrats, you're now familiar with the basics of Gradio! 🥳 Go to our [next guide](https://gradio.app/key_features) to learn more about the key features of Gradio. diff --git a/testbed/gradio-app__gradio/guides/01_getting-started/02_key-features.md b/testbed/gradio-app__gradio/guides/01_getting-started/02_key-features.md new file mode 100644 index 0000000000000000000000000000000000000000..5ccad6de12487557cb5b2607886122fd5725960f --- /dev/null +++ b/testbed/gradio-app__gradio/guides/01_getting-started/02_key-features.md @@ -0,0 +1,289 @@ +# Key Features + +Let's go through some of the most popular features of Gradio! Here are Gradio's key features: + +1. [Adding example inputs](#example-inputs) +2. [Passing custom error messages](#alerts) +3. [Adding descriptive content](#descriptive-content) +4. [Setting up flagging](#flagging) +5. [Preprocessing and postprocessing](#preprocessing-and-postprocessing) +6. [Styling demos](#styling) +7. [Queuing users](#queuing) +8. [Iterative outputs](#iterative-outputs) +9. [Progress bars](#progress-bars) +10. [Batch functions](#batch-functions) +11. 
[Running on collaborative notebooks](#colab-notebooks) + +## Example Inputs + +You can provide example data that a user can easily load into `Interface`. This can be helpful to demonstrate the types of inputs the model expects, as well as to provide a way to explore your dataset in conjunction with your model. To load example data, you can provide a **nested list** to the `examples=` keyword argument of the Interface constructor. Each sublist within the outer list represents a data sample, and each element within the sublist represents an input for each input component. The format of example data for each component is specified in the [Docs](https://gradio.app/docs#components). + +$code_calculator +$demo_calculator + +You can load a large dataset into the examples to browse and interact with the dataset through Gradio. The examples will be automatically paginated (you can configure this through the `examples_per_page` argument of `Interface`). + +Continue learning about examples in the [More On Examples](https://gradio.app/guides/more-on-examples) guide. + +## Alerts + +You wish to pass custom error messages to the user. To do so, raise a `gr.Error("custom message")` to display an error message. If you try to divide by zero in the calculator demo above, a popup modal will display the custom error message. Learn more about Error in the [docs](https://gradio.app/docs#error). + +You can also issue `gr.Warning("message")` and `gr.Info("message")` by having them as standalone lines in your function, which will immediately display modals while continuing the execution of your function. Queueing needs to be enabled for this to work. + +Note below how the `gr.Error` has to be raised, while the `gr.Warning` and `gr.Info` are single lines. + +```python +def start_process(name): + gr.Info("Starting process") + if name is None: + gr.Warning("Name is empty") + ... 
+ if success == False: + raise gr.Error("Process failed") +``` + +## Descriptive Content + +In the previous example, you may have noticed the `title=` and `description=` keyword arguments in the `Interface` constructor that helps users understand your app. + +There are three arguments in the `Interface` constructor to specify where this content should go: + +- `title`: which accepts text and can display it at the very top of interface, and also becomes the page title. +- `description`: which accepts text, markdown or HTML and places it right under the title. +- `article`: which also accepts text, markdown or HTML and places it below the interface. + +![annotated](https://github.com/gradio-app/gradio/blob/main/guides/assets/annotated.png?raw=true) + +If you're using the `Blocks` API instead, you can insert text, markdown, or HTML anywhere using the `gr.Markdown(...)` or `gr.HTML(...)` components, with descriptive content inside the `Component` constructor. + +Another useful keyword argument is `label=`, which is present in every `Component`. This modifies the label text at the top of each `Component`. You can also add the `info=` keyword argument to form elements like `Textbox` or `Radio` to provide further information on their usage. + +```python +gr.Number(label='Age', info='In years, must be greater than 0') +``` + +## Flagging + +By default, an `Interface` will have "Flag" button. When a user testing your `Interface` sees input with interesting output, such as erroneous or unexpected model behaviour, they can flag the input for you to review. Within the directory provided by the `flagging_dir=` argument to the `Interface` constructor, a CSV file will log the flagged inputs. If the interface involves file data, such as for Image and Audio components, folders will be created to store those flagged data as well. 
+ +For example, with the calculator interface shown above, we would have the flagged data stored in the flagged directory shown below: + +```directory ++-- calculator.py ++-- flagged/ +| +-- logs.csv +``` + +_flagged/logs.csv_ + +```csv +num1,operation,num2,Output +5,add,7,12 +6,subtract,1.5,4.5 +``` + +With the sepia interface shown earlier, we would have the flagged data stored in the flagged directory shown below: + +```directory ++-- sepia.py ++-- flagged/ +| +-- logs.csv +| +-- im/ +| | +-- 0.png +| | +-- 1.png +| +-- Output/ +| | +-- 0.png +| | +-- 1.png +``` + +_flagged/logs.csv_ + +```csv +im,Output +im/0.png,Output/0.png +im/1.png,Output/1.png +``` + +If you wish for the user to provide a reason for flagging, you can pass a list of strings to the `flagging_options` argument of Interface. Users will have to select one of the strings when flagging, which will be saved as an additional column to the CSV. + +## Preprocessing and Postprocessing + +![](https://github.com/gradio-app/gradio/blob/main/guides/assets/dataflow.svg?raw=true) + +As you've seen, Gradio includes components that can handle a variety of different data types, such as images, audio, and video. Most components can be used both as inputs or outputs. + +When a component is used as an input, Gradio automatically handles the _preprocessing_ needed to convert the data from a type sent by the user's browser (such as a base64 representation of a webcam snapshot) to a form that can be accepted by your function (such as a `numpy` array). + +Similarly, when a component is used as an output, Gradio automatically handles the _postprocessing_ needed to convert the data from what is returned by your function (such as a list of image paths) to a form that can be displayed in the user's browser (such as a `Gallery` of images in base64 format). + +You can control the _preprocessing_ using the parameters when constructing the image component. 
For example, here if you instantiate the `Image` component with the following parameters, it will convert the image to the `PIL` type and reshape it to be `(100, 100)` no matter the original size that it was submitted as: + +```py +img = gr.Image(shape=(100, 100), type="pil") +``` + +In contrast, here we keep the original size of the image, but invert the colors before converting it to a numpy array: + +```py +img = gr.Image(invert_colors=True, type="numpy") +``` + +Postprocessing is a lot easier! Gradio automatically recognizes the format of the returned data (e.g. is the `Image` a `numpy` array or a `str` filepath?) and postprocesses it into a format that can be displayed by the browser. + +Take a look at the [Docs](https://gradio.app/docs) to see all the preprocessing-related parameters for each Component. + +## Styling + +Gradio themes are the easiest way to customize the look and feel of your app. You can choose from a variety of themes, or create your own. To do so, pass the `theme=` kwarg to the `Interface` constructor. For example: + +```python +demo = gr.Interface(..., theme=gr.themes.Monochrome()) +``` + +Gradio comes with a set of prebuilt themes which you can load from `gr.themes.*`. You can extend these themes or create your own themes from scratch - see the [Theming guide](https://gradio.app/guides/theming-guide) for more details. + +For additional styling ability, you can pass any CSS to your app using the `css=` kwarg. +The base class for the Gradio app is `gradio-container`, so here's an example that changes the background color of the Gradio app: + +```python +with gr.Interface(css=".gradio-container {background-color: red}") as demo: + ... +``` + +## Queuing + +If your app expects heavy traffic, use the `queue()` method to control processing rate. This will queue up calls so only a certain number of requests are processed at a single time. 
Queueing uses websockets, which also prevent network timeouts, so you should use queueing if the inference time of your function is long (> 1min). + +With `Interface`: + +```python +demo = gr.Interface(...).queue() +demo.launch() +``` + +With `Blocks`: + +```python +with gr.Blocks() as demo: + #... +demo.queue() +demo.launch() +``` + +You can control the number of requests processed at a single time as such: + +```python +demo.queue(concurrency_count=3) +``` + +See the [Docs on queueing](/docs/#queue) on configuring other queuing parameters. + +To specify only certain functions for queueing in Blocks: + +```python +with gr.Blocks() as demo2: + num1 = gr.Number() + num2 = gr.Number() + output = gr.Number() + gr.Button("Add").click( + lambda a, b: a + b, [num1, num2], output) + gr.Button("Multiply").click( + lambda a, b: a * b, [num1, num2], output, queue=True) +demo2.launch() +``` + +## Iterative Outputs + +In some cases, you may want to stream a sequence of outputs rather than show a single output at once. For example, you might have an image generation model and you want to show the image that is generated at each step, leading up to the final image. Or you might have a chatbot which streams its response one word at a time instead of returning it all at once. + +In such cases, you can supply a **generator** function into Gradio instead of a regular function. Creating generators in Python is very simple: instead of a single `return` value, a function should `yield` a series of values instead. Usually the `yield` statement is put in some kind of loop. Here's an example of an generator that simply counts up to a given number: + +```python +def my_generator(x): + for i in range(x): + yield i +``` + +You supply a generator into Gradio the same way as you would a regular function. 
For example, here's a (fake) image generation model that generates noise for several steps before outputting an image:
+ +For example, here is a batched function that takes in two lists of inputs (a list of +words and a list of ints), and returns a list of trimmed words as output: + +```py +import time + +def trim_words(words, lens): + trimmed_words = [] + time.sleep(5) + for w, l in zip(words, lens): + trimmed_words.append(w[:int(l)]) + return [trimmed_words] +``` + +The advantage of using batched functions is that if you enable queuing, the Gradio +server can automatically _batch_ incoming requests and process them in parallel, +potentially speeding up your demo. Here's what the Gradio code looks like (notice +the `batch=True` and `max_batch_size=16` -- both of these parameters can be passed +into event triggers or into the `Interface` class) + +With `Interface`: + +```python +demo = gr.Interface(trim_words, ["textbox", "number"], ["output"], + batch=True, max_batch_size=16) +demo.queue() +demo.launch() +``` + +With `Blocks`: + +```py +import gradio as gr + +with gr.Blocks() as demo: + with gr.Row(): + word = gr.Textbox(label="word") + leng = gr.Number(label="leng") + output = gr.Textbox(label="Output") + with gr.Row(): + run = gr.Button() + + event = run.click(trim_words, [word, leng], output, batch=True, max_batch_size=16) + +demo.queue() +demo.launch() +``` + +In the example above, 16 requests could be processed in parallel (for a total inference +time of 5 seconds), instead of each request being processed separately (for a total +inference time of 80 seconds). Many Hugging Face `transformers` and `diffusers` models +work very naturally with Gradio's batch mode: here's [an example demo using diffusers to +generate images in batches](https://github.com/gradio-app/gradio/blob/main/demo/diffusers_with_batching/run.py) + +Note: using batch functions with Gradio **requires** you to enable queuing in the underlying Interface or Blocks (see the queuing section above). 
+ +## Colab Notebooks + +Gradio is able to run anywhere you run Python, including local jupyter notebooks as well as collaborative notebooks, such as [Google Colab](https://colab.research.google.com/). In the case of local jupyter notebooks and Google Colab notbooks, Gradio runs on a local server which you can interact with in your browser. (Note: for Google Colab, this is accomplished by [service worker tunneling](https://github.com/tensorflow/tensorboard/blob/master/docs/design/colab_integration.md), which requires cookies to be enabled in your browser.) For other remote notebooks, Gradio will also run on a server, but you will need to use [SSH tunneling](https://coderwall.com/p/ohk6cg/remote-access-to-ipython-notebooks-via-ssh) to view the app in your local browser. Often a simpler options is to use Gradio's built-in public links, [discussed in the next Guide](https://gradio.app/guides/sharing-your-app/#sharing-demos). diff --git a/testbed/gradio-app__gradio/guides/01_getting-started/03_sharing-your-app.md b/testbed/gradio-app__gradio/guides/01_getting-started/03_sharing-your-app.md new file mode 100644 index 0000000000000000000000000000000000000000..d5105a921c5a22d40d76344c858ce21421f844d5 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/01_getting-started/03_sharing-your-app.md @@ -0,0 +1,283 @@ +# Sharing Your App + +How to share your Gradio app: + +1. [Sharing demos with the share parameter](#sharing-demos) +2. [Hosting on HF Spaces](#hosting-on-hf-spaces) +3. [Embedding hosted spaces](#embedding-hosted-spaces) +4. [Embedding with web components](#embedding-with-web-components) +5. [Using the API page](#api-page) +6. [Adding authentication to the page](#authentication) +7. [Accessing Network Requests](#accessing-the-network-request-directly) +8. [Mounting within FastAPI](#mounting-within-another-fast-api-app) +9. 
[Security](#security-and-file-access) + +## Sharing Demos + +Gradio demos can be easily shared publicly by setting `share=True` in the `launch()` method. Like this: + +```python +demo.launch(share=True) +``` + +This generates a public, shareable link that you can send to anybody! When you send this link, the user on the other side can try out the model in their browser. Because the processing happens on your device (as long as your device stays on!), you don't have to worry about any packaging any dependencies. A share link usually looks something like this: **XXXXX.gradio.app**. Although the link is served through a Gradio URL, we are only a proxy for your local server, and do not store any data sent through your app. + +Keep in mind, however, that these links are publicly accessible, meaning that anyone can use your model for prediction! Therefore, make sure not to expose any sensitive information through the functions you write, or allow any critical changes to occur on your device. If you set `share=False` (the default, except in colab notebooks), only a local link is created, which can be shared by [port-forwarding](https://www.ssh.com/ssh/tunneling/example) with specific users. + +![sharing](https://github.com/gradio-app/gradio/blob/main/guides/assets/sharing.svg?raw=true) + +Share links expire after 72 hours. + +## Hosting on HF Spaces + +If you'd like to have a permanent link to your Gradio demo on the internet, use Hugging Face Spaces. [Hugging Face Spaces](http://huggingface.co/spaces/) provides the infrastructure to permanently host your machine learning model for free! + +After you have [created a free Hugging Face account](https://huggingface.co/join), you have three methods to deploy your Gradio app to Hugging Face Spaces: + +1. From terminal: run `gradio deploy` in your app directory. The CLI will gather some basic metadata and then launch your app. 
To update your space, you can re-run this command or enable the Github Actions option to automatically update the Spaces on `git push`. + +2. From your browser: Drag and drop a folder containing your Gradio model and all related files [here](https://huggingface.co/new-space). + +3. Connect Spaces with your Git repository and Spaces will pull the Gradio app from there. See [this guide how to host on Hugging Face Spaces](https://huggingface.co/blog/gradio-spaces) for more information. + + + +Note: Some components, like `gr.Image`, will display a "Share" button only on Spaces, so that users can share the generated output to the Discussions page of the Space easily. You can disable this with `show_share_button`, such as `gr.Image(show_share_button=False)`. + +![Image with show_share_button=True](https://github.com/gradio-app/gradio/blob/main/guides/assets/share_icon.png?raw=true) + +## Embedding Hosted Spaces + +Once you have hosted your app on Hugging Face Spaces (or on your own server), you may want to embed the demo on a different website, such as your blog or your portfolio. Embedding an interactive demo allows people to try out the machine learning model that you have built, without needing to download or install anything — right in their browser! The best part is that you can embed interactive demos even in static websites, such as GitHub pages. + +There are two ways to embed your Gradio demos. You can find quick links to both options directly on the Hugging Face Space page, in the "Embed this Space" dropdown option: + +![Embed this Space dropdown option](https://github.com/gradio-app/gradio/blob/main/guides/assets/embed_this_space.png?raw=true) + +### Embedding with Web Components + +Web components typically offer a better experience to users than IFrames. Web components load lazily, meaning that they won't slow down the loading time of your website, and they automatically adjust their height based on the size of the Gradio app. 
+ +To embed with Web Components: + +1. Import the gradio JS library into into your site by adding the script below in your site (replace {GRADIO_VERSION} in the URL with the library version of Gradio you are using). + +```html + +``` + +2. Add + +```html + +``` + +element where you want to place the app. Set the `src=` attribute to your Space's embed URL, which you can find in the "Embed this Space" button. For example: + +```html + +``` + + + +You can see examples of how web components look on the Gradio landing page. + +You can also customize the appearance and behavior of your web component with attributes that you pass into the `` tag: + +- `src`: as we've seen, the `src` attributes links to the URL of the hosted Gradio demo that you would like to embed +- `space`: an optional shorthand if your Gradio demo is hosted on Hugging Face Space. Accepts a `username/space_name` instead of a full URL. Example: `gradio/Echocardiogram-Segmentation`. If this attribute attribute is provided, then `src` does not need to be provided. +- `control_page_title`: a boolean designating whether the html title of the page should be set to the title of the Gradio app (by default `"false"`) +- `initial_height`: the initial height of the web component while it is loading the Gradio app, (by default `"300px"`). Note that the final height is set based on the size of the Gradio app. 
+- `container`: whether to show the border frame and information about where the Space is hosted (by default `"true"`) +- `info`: whether to show just the information about where the Space is hosted underneath the embedded app (by default `"true"`) +- `autoscroll`: whether to autoscroll to the output when prediction has finished (by default `"false"`) +- `eager`: whether to load the Gradio app as soon as the page loads (by default `"false"`) +- `theme_mode`: whether to use the `dark`, `light`, or default `system` theme mode (by default `"system"`) +- `render`: an event that is triggered once the embedded space has finished rendering. + +Here's an example of how to use these attributes to create a Gradio app that does not lazy load and has an initial height of 0px. + +```html + +``` + +Here's another example of how to use the `render` event. An event listener is used to capture the `render` event and will call the `handleLoadComplete()` function once rendering is complete. + +```html + +``` + +_Note: While Gradio's CSS will never impact the embedding page, the embedding page can affect the style of the embedded Gradio app. Make sure that any CSS in the parent page isn't so general that it could also apply to the embedded Gradio app and cause the styling to break. Element selectors such as `header { ... }` and `footer { ... }` will be the most likely to cause issues._ + +### Embedding with IFrames + +To embed with IFrames instead (if you cannot add javascript to your website, for example), add this element: + +```html + +``` + +Again, you can find the `src=` attribute to your Space's embed URL, which you can find in the "Embed this Space" button. + +Note: if you use IFrames, you'll probably want to add a fixed `height` attribute and set `style="border:0;"` to remove the boreder. In addition, if your app requires permissions such as access to the webcam or the microphone, you'll need to provide that as well using the `allow` attribute. 
+ +## API Page + +You can use almost any Gradio app as an API! In the footer of a Gradio app [like this one](https://huggingface.co/spaces/gradio/hello_world), you'll see a "Use via API" link. + +![Use via API](https://github.com/gradio-app/gradio/blob/main/guides/assets/use_via_api.png?raw=true) + +This is a page that lists the endpoints that can be used to query the Gradio app, via our supported clients: either [the Python client](https://gradio.app/guides/getting-started-with-the-python-client/), or [the JavaScript client](https://gradio.app/guides/getting-started-with-the-js-client/). For each endpoint, Gradio automatically generates the parameters and their types, as well as example inputs. + +The endpoints are automatically created when you launch a Gradio `Interface`. If you are using Gradio `Blocks`, you can also set up a Gradio API page, though we recommend that you explicitly name each event listener, such as + +```python +btn.click(add, [num1, num2], output, api_name="addition") +``` + +This will add and document the endpoint `/api/addition/` to the automatically generated API page. Otherwise, your API endpoints will appear as "unnamed" endpoints. + +_Note_: For Gradio apps in which [queueing is enabled](https://gradio.app/guides/key-features#queuing), users can bypass the queue if they make a POST request to your API endpoint. To disable this behavior, set `api_open=False` in the `queue()` method. To disable the API page altogether, set `show_api=False` in `.launch()`. + +## Authentication + +### Password-protected app + +You may wish to put an authentication page in front of your app to limit who can open your app. 
With the `auth=` keyword argument in the `launch()` method, you can provide a tuple with a username and password, or a list of acceptable username/password tuples. Here's an example that provides password-based authentication for a single user named "admin":
+
+```python
+demo.launch(auth=("admin", "pass1234"))
+```
+
+For more complex authentication handling, you can even pass a function that takes a username and password as arguments, and returns True to allow authentication, False otherwise. This can be used for, among other things, making requests to 3rd-party authentication services.
+
+Here's an example of a function that accepts any login where the username and password are the same:
+
+```python
+def same_auth(username, password):
+    return username == password
+demo.launch(auth=same_auth)
+```
+
+For authentication to work properly, third party cookies must be enabled in your browser.
+This is not the case by default for Safari and Chrome Incognito Mode.
+
+### OAuth (Login via Hugging Face)
+
+Gradio supports OAuth login via Hugging Face. This feature is currently **experimental** and only available on Spaces.
+It allows you to add a _"Sign in with Hugging Face"_ button to your demo. Check out [this Space](https://huggingface.co/spaces/Wauplin/gradio-oauth-demo)
+for a live demo.
+
+To enable OAuth, you must set `hf_oauth: true` as a Space metadata in your README.md file. This will register your Space
+as an OAuth application on Hugging Face. Next, you can use `gr.LoginButton` and `gr.LogoutButton` to add login and logout buttons to
+your Gradio app. Once a user is logged in with their HF account, you can retrieve their profile by adding a parameter of type
+`gr.OAuthProfile` to any Gradio function. The user profile will be automatically injected as a parameter value.
+
+Here is a short example:
+
+```py
+import gradio as gr
+
+
+def hello(profile: gr.OAuthProfile | None) -> str:
+    if profile is None:
+        return "I don't know you." 
+ return f"Hello {profile.name}" + + +with gr.Blocks() as demo: + gr.LoginButton() + gr.LogoutButton() + gr.Markdown().attach_load_event(hello, None) +``` + +When the user clicks on the login button, they get redirected in a new page to authorize your Space. + +![Allow Space app](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/oauth_sign_in.png) + +Users can revoke access to their profile at any time in their [settings](https://huggingface.co/settings/connected-applications). + +As seen above, OAuth features are available only when your app runs in a Space. However, you often need to test your app +locally before deploying it. To help with that, the `gr.LoginButton` is mocked. When a user clicks on it, they are +automatically logged in with a fake user profile. This allows you to debug your app before deploying it to a Space. + +## Accessing the Network Request Directly + +When a user makes a prediction to your app, you may need the underlying network request, in order to get the request headers (e.g. for advanced authentication), log the client's IP address, getting the query parameters, or for other reasons. Gradio supports this in a similar manner to FastAPI: simply add a function parameter whose type hint is `gr.Request` and Gradio will pass in the network request as that parameter. Here is an example: + +```python +import gradio as gr + +def echo(text, request: gr.Request): + if request: + print("Request headers dictionary:", request.headers) + print("IP address:", request.client.host) + print("Query parameters:", dict(request.query_params)) + return text + +io = gr.Interface(echo, "textbox", "textbox").launch() +``` + +Note: if your function is called directly instead of through the UI (this happens, for +example, when examples are cached, or when the Gradio app is called via API), then `request` will be `None`. +You should handle this case explicitly to ensure that your app does not throw any errors. 
That is why
+we have the explicit check `if request`.
+
+## Mounting Within Another FastAPI App
+
+In some cases, you might have an existing FastAPI app, and you'd like to add a path for a Gradio demo.
+You can easily do this with `gradio.mount_gradio_app()`.
+
+Here's a complete example:
+
+$code_custom_path
+
+Note that this approach also allows you to run your Gradio apps on custom paths (`http://localhost:8000/gradio` in the example above).
+
+## Security and File Access
+
+Sharing your Gradio app with others (by hosting it on Spaces, on your own server, or through temporary share links) **exposes** certain files on the host machine to users of your Gradio app.
+
+In particular, Gradio apps ALLOW users to access three kinds of files:
+
+- **Files in the same directory (or a subdirectory) of where the Gradio script is launched from.** For example, if the path to your gradio scripts is `/home/usr/scripts/project/app.py` and you launch it from `/home/usr/scripts/project/`, then users of your shared Gradio app will be able to access any files inside `/home/usr/scripts/project/`. This is done so that you can easily reference these files in your Gradio app (e.g. for your app's `examples`).
+
+- **Temporary files created by Gradio.** These are files that are created by Gradio as part of running your prediction function. For example, if your prediction function returns a video file, then Gradio will save that video to a temporary file and then send the path to the temporary file to the front end. You can customize the location of temporary files created by Gradio by setting the environment variable `GRADIO_TEMP_DIR` to an absolute path, such as `/home/usr/scripts/project/temp/`.
+
+- **Files that you explicitly allow via the `allowed_paths` parameter in `launch()`**. This parameter allows you to pass in a list of additional directories or exact filepaths you'd like to allow users to have access to. (By default, this parameter is an empty list). 
+ +Gradio DOES NOT ALLOW access to: + +- **Dotfiles** (any files whose name begins with `'.'`) or any files that are contained in any directory whose name begins with `'.'` + +- **Files that you explicitly block via the `blocked_paths` parameter in `launch()`**. You can pass in a list of additional directories or exact filepaths to the `blocked_paths` parameter in `launch()`. This parameter takes precedence over the files that Gradio exposes by default or by the `allowed_paths`. + +- **Any other paths on the host machine**. Users should NOT be able to access other arbitrary paths on the host. + +Please make sure you are running the latest version of `gradio` for these security settings to apply. diff --git a/testbed/gradio-app__gradio/guides/02_building-interfaces/01_interface-state.md b/testbed/gradio-app__gradio/guides/02_building-interfaces/01_interface-state.md new file mode 100644 index 0000000000000000000000000000000000000000..eaf4f397d31e35236107cc8c89d53695a19df333 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/02_building-interfaces/01_interface-state.md @@ -0,0 +1,28 @@ +# Interface State + +This guide covers how State is handled in Gradio. Learn the difference between Global and Session states, and how to use both. + +## Global State + +Your function may use data that persists beyond a single function call. If the data is something accessible to all function calls and all users, you can create a variable outside the function call and access it inside the function. For example, you may load a large model outside the function and use it inside the function so that every function call does not need to reload the model. + +$code_score_tracker + +In the code above, the `scores` array is shared between all users. If multiple users are accessing this demo, their scores will all be added to the same list, and the returned top 3 scores will be collected from this shared reference. 
+
+## Session State
+
+Another type of data persistence Gradio supports is session **state**, where data persists across multiple submits within a page session. However, data is _not_ shared between different users of your model. To store data in a session state, you need to do three things:
+
+1. Pass in an extra parameter into your function, which represents the state of the interface.
+2. At the end of the function, return the updated value of the state as an extra return value.
+3. Add the `'state'` input and `'state'` output components when creating your `Interface`
+
+A chatbot is an example where you would need session state - you want access to a user's previous submissions, but you cannot store chat history in a global variable, because then chat history would get jumbled between different users.
+
+$code_chatbot_dialogpt
+$demo_chatbot_dialogpt
+
+Notice how the state persists across submits within each page, but if you load this demo in another tab (or refresh the page), the demos will not share chat history.
+
+The default value of `state` is None. If you pass a default value to the state parameter of the function, it is used as the default value of the state instead. The `Interface` class only supports a single input and output state variable, though it can be a list with multiple elements. For more complex use cases, you can use Blocks, [which supports multiple `State` variables](/guides/state-in-blocks/).
diff --git a/testbed/gradio-app__gradio/guides/02_building-interfaces/02_reactive-interfaces.md b/testbed/gradio-app__gradio/guides/02_building-interfaces/02_reactive-interfaces.md
new file mode 100644
index 0000000000000000000000000000000000000000..0715a10733490a8bd4aa98a9319cb460fbf7b5c2
--- /dev/null
+++ b/testbed/gradio-app__gradio/guides/02_building-interfaces/02_reactive-interfaces.md
@@ -0,0 +1,26 @@
+# Reactive Interfaces
+
+This guide covers how to get Gradio interfaces to refresh automatically or continuously stream data. 
+ +## Live Interfaces + +You can make interfaces automatically refresh by setting `live=True` in the interface. Now the interface will recalculate as soon as the user input changes. + +$code_calculator_live +$demo_calculator_live + +Note there is no submit button, because the interface resubmits automatically on change. + +## Streaming Components + +Some components have a "streaming" mode, such as `Audio` component in microphone mode, or the `Image` component in webcam mode. Streaming means data is sent continuously to the backend and the `Interface` function is continuously being rerun. + +The difference between `gr.Audio(source='microphone')` and `gr.Audio(source='microphone', streaming=True)`, when both are used in `gr.Interface(live=True)`, is that the first `Component` will automatically submit data and run the `Interface` function when the user stops recording, whereas the second `Component` will continuously send data and run the `Interface` function _during_ recording. + +Here is example code of streaming images from the webcam. + +$code_stream_frames + +Streaming can also be done in an output component. A `gr.Audio(streaming=True)` output component can take a stream of audio data yielded piece-wise by a generator function and combines them into a single audio file. + +$code_stream_audio_out diff --git a/testbed/gradio-app__gradio/guides/02_building-interfaces/03_more-on-examples.md b/testbed/gradio-app__gradio/guides/02_building-interfaces/03_more-on-examples.md new file mode 100644 index 0000000000000000000000000000000000000000..2860dc903fb6ca88157d06d4eaa5b96f8bf1051f --- /dev/null +++ b/testbed/gradio-app__gradio/guides/02_building-interfaces/03_more-on-examples.md @@ -0,0 +1,41 @@ +# More on Examples + +This guide covers what more you can do with Examples: Loading examples from a directory, providing partial examples, and caching. If Examples is new to you, check out the intro in the [Key Features](/guides/key-features/#example-inputs) guide. 
+ +## Providing Examples + +As covered in the [Key Features](/guides/key-features/#example-inputs) guide, adding examples to an Interface is as easy as providing a list of lists to the `examples` +keyword argument. +Each sublist is a data sample, where each element corresponds to an input of the prediction function. +The inputs must be ordered in the same order as the prediction function expects them. + +If your interface only has one input component, then you can provide your examples as a regular list instead of a list of lists. + +### Loading Examples from a Directory + +You can also specify a path to a directory containing your examples. If your Interface takes only a single file-type input, e.g. an image classifier, you can simply pass a directory filepath to the `examples=` argument, and the `Interface` will load the images in the directory as examples. +In the case of multiple inputs, this directory must +contain a log.csv file with the example values. +In the context of the calculator demo, we can set `examples='/demo/calculator/examples'` and in that directory we include the following `log.csv` file: + +```csv +num,operation,num2 +5,"add",3 +4,"divide",2 +5,"multiply",3 +``` + +This can be helpful when browsing flagged data. Simply point to the flagged directory and the `Interface` will load the examples from the flagged data. + +### Providing Partial Examples + +Sometimes your app has many input components, but you would only like to provide examples for a subset of them. In order to exclude some inputs from the examples, pass `None` for all data samples corresponding to those particular components. + +## Caching examples + +You may wish to provide some cached examples of your model for users to quickly try out, in case your model takes a while to run normally. +If `cache_examples=True`, the `Interface` will run all of your examples through your app and save the outputs when you call the `launch()` method. 
This data will be saved in a directory called `gradio_cached_examples`. + +Whenever a user clicks on an example, the output will automatically be populated in the app now, using data from this cached directory instead of actually running the function. This is useful so users can quickly try out your model without adding any load! + +Keep in mind once the cache is generated, it will not be updated in future launches. If the examples or function logic change, delete the cache folder to clear the cache and rebuild it with another `launch()`. diff --git a/testbed/gradio-app__gradio/guides/02_building-interfaces/04_advanced-interface-features.md b/testbed/gradio-app__gradio/guides/02_building-interfaces/04_advanced-interface-features.md new file mode 100644 index 0000000000000000000000000000000000000000..99f96c83d54f26700be83b7f01890ffa1b47ed7e --- /dev/null +++ b/testbed/gradio-app__gradio/guides/02_building-interfaces/04_advanced-interface-features.md @@ -0,0 +1,32 @@ +# Advanced Interface Features + +## Loading Hugging Face Models and Spaces + +Gradio integrates nicely with the [Hugging Face Hub](https://hf.co), allowing you to load models and Spaces with just one line of code. To use this, simply use the `load()` method in the `Interface` class. 
So: + +- To load any model from the Hugging Face Hub and create an interface around it, you pass `"model/"` or `"huggingface/"` followed by the model name, like these examples: + +```python +gr.Interface.load("huggingface/gpt2").launch(); +``` + +```python +gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", + inputs=gr.Textbox(lines=5, label="Input Text") # customizes the input component +).launch() +``` + +- To load any Space from the Hugging Face Hub and recreate it locally (so that you can customize the inputs and outputs for example), you pass `"spaces/"` followed by the model name: + +```python +gr.Interface.load("spaces/eugenesiow/remove-bg", + inputs="webcam", + title="Remove your webcam background!").launch() +``` + +One of the great things about loading Hugging Face models or spaces using Gradio is that you can then immediately use the resulting `Interface` object just like function in your Python code (this works for every type of model/space: text, images, audio, video, and even multimodal models): + +```python +io = gr.Interface.load("models/EleutherAI/gpt-neo-2.7B") +io("It was the best of times") # outputs model completion +``` diff --git a/testbed/gradio-app__gradio/guides/02_building-interfaces/05_four-kinds-of-interfaces.md b/testbed/gradio-app__gradio/guides/02_building-interfaces/05_four-kinds-of-interfaces.md new file mode 100644 index 0000000000000000000000000000000000000000..36ad6e99eb716c285ba253dc8ae38632e2fa7e72 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/02_building-interfaces/05_four-kinds-of-interfaces.md @@ -0,0 +1,44 @@ +# The 4 Kinds of Gradio Interfaces + +So far, we've always assumed that in order to build an Gradio demo, you need both inputs and outputs. But this isn't always the case for machine learning demos: for example, _unconditional image generation models_ don't take any input but produce an image as the output. 
+
+It turns out that the `gradio.Interface` class can actually handle 4 different kinds of demos:
+
+1. **Standard demos**: which have both separate inputs and outputs (e.g. an image classifier or speech-to-text model)
+2. **Output-only demos**: which don't take any input but produce an output (e.g. an unconditional image generation model)
+3. **Input-only demos**: which don't produce any output but do take in some sort of input (e.g. a demo that saves images that you upload to a persistent external database)
+4. **Unified demos**: which have both input and output components, but the input and output components _are the same_. This means that the output produced overrides the input (e.g. a text autocomplete model)
+
+Depending on the kind of demo, the user interface (UI) looks slightly different:
+
+![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/interfaces4.png)
+
+Let's see how to build each kind of demo using the `Interface` class, along with examples:
+
+## Standard demos
+
+To create a demo that has both the input and the output components, you simply need to set the values of the `inputs` and `outputs` parameter in `Interface()`. Here's an example demo of a simple image filter:
+
+$code_sepia_filter
+$demo_sepia_filter
+
+## Output-only demos
+
+What about demos that only contain outputs? In order to build such a demo, you simply set the value of the `inputs` parameter in `Interface()` to `None`. Here's an example demo of a mock image generation model:
+
+$code_fake_gan_no_input
+$demo_fake_gan_no_input
+
+## Input-only demos
+
+Similarly, to create a demo that only contains inputs, set the value of `outputs` parameter in `Interface()` to be `None`. Here's an example demo that saves any uploaded image to disk:
+
+$code_save_file_no_output
+$demo_save_file_no_output
+
+## Unified demos
+
+A demo that has a single component as both the input and the output. 
It can simply be created by setting the values of the `inputs` and `outputs` parameter as the same component. Here's an example demo of a text generation model:
+
+$code_unified_demo_text_generation
+$demo_unified_demo_text_generation
diff --git a/testbed/gradio-app__gradio/guides/03_building-with-blocks/01_blocks-and-event-listeners.md b/testbed/gradio-app__gradio/guides/03_building-with-blocks/01_blocks-and-event-listeners.md
new file mode 100644
index 0000000000000000000000000000000000000000..e76dcde3d0e479839ca69fe90534901314cd755f
--- /dev/null
+++ b/testbed/gradio-app__gradio/guides/03_building-with-blocks/01_blocks-and-event-listeners.md
@@ -0,0 +1,182 @@
+# Blocks and Event Listeners
+
+We took a quick look at Blocks in the [Quickstart](https://gradio.app/guides/quickstart/#blocks-more-flexibility-and-control). Let's dive deeper. This guide will cover how Blocks are structured, event listeners and their types, running events continuously, updating configurations, and using dictionaries vs lists.
+
+## Blocks Structure
+
+Take a look at the demo below.
+
+$code_hello_blocks
+$demo_hello_blocks
+
+- First, note the `with gr.Blocks() as demo:` clause. The Blocks app code will be contained within this clause.
+- Next come the Components. These are the same Components used in `Interface`. However, instead of being passed to some constructor, Components are automatically added to the Blocks as they are created within the `with` clause.
+- Finally, the `click()` event listener. Event listeners define the data flow within the app. In the example above, the listener ties the two Textboxes together. The Textbox `name` acts as the input and Textbox `output` acts as the output to the `greet` method. This dataflow is triggered when the Button `greet_btn` is clicked. Like an Interface, an event listener can take multiple inputs or outputs. 
+ +You can also attach event listeners using decorators - skip the `fn` argument and assign `inputs` and `outputs` directly: + +$code_hello_blocks_decorator + +## Event Listeners and Interactivity + +In the example above, you'll notice that you are able to edit Textbox `name`, but not Textbox `output`. This is because any Component that acts as an input to an event listener is made interactive. However, since Textbox `output` acts only as an output, Gradio determines that it should not be made interactive. You can override the default behavior and directly configure the interactivity of a Component with the boolean `interactive` keyword argument. + +```python +output = gr.Textbox(label="Output", interactive=True) +``` + +_Note_: What happens if a Gradio component is neither an input nor an output? If a component is constructed with a default value, then it is presumed to be displaying content and is rendered non-interactive. Otherwise, it is rendered interactive. Again, this behavior can be overridden by specifying a value for the `interactive` argument. + +## Types of Event Listeners + +Take a look at the demo below: + +$code_blocks_hello +$demo_blocks_hello + +Instead of being triggered by a click, the `welcome` function is triggered by typing in the Textbox `inp`. This is due to the `change()` event listener. Different Components support different event listeners. For example, the `Video` Component supports a `play()` event listener, triggered when a user presses play. See the [Docs](http://gradio.app/docs#components) for the event listeners for each Component. + +## Multiple Data Flows + +A Blocks app is not limited to a single data flow the way Interfaces are. Take a look at the demo below: + +$code_reversible_flow +$demo_reversible_flow + +Note that `num1` can act as input to `num2`, and also vice-versa! As your apps get more complex, you will have many data flows connecting various Components. 
+ +Here's an example of a "multi-step" demo, where the output of one model (a speech-to-text model) gets fed into the next model (a sentiment classifier). + +$code_blocks_speech_text_sentiment +$demo_blocks_speech_text_sentiment + +## Function Input List vs Dict + +The event listeners you've seen so far have a single input component. If you'd like to have multiple input components pass data to the function, you have two options on how the function can accept input component values: + +1. as a list of arguments, or +2. as a single dictionary of values, keyed by the component + +Let's see an example of each: +$code_calculator_list_and_dict + +Both `add()` and `sub()` take `a` and `b` as inputs. However, the syntax is different between these listeners. + +1. To the `add_btn` listener, we pass the inputs as a list. The function `add()` takes each of these inputs as arguments. The value of `a` maps to the argument `num1`, and the value of `b` maps to the argument `num2`. +2. To the `sub_btn` listener, we pass the inputs as a set (note the curly brackets!). The function `sub()` takes a single dictionary argument `data`, where the keys are the input components, and the values are the values of those components. + +It is a matter of preference which syntax you prefer! For functions with many input components, option 2 may be easier to manage. + +$demo_calculator_list_and_dict + +## Function Return List vs Dict + +Similarly, you may return values for multiple output components either as: + +1. a list of values, or +2. 
a dictionary keyed by the component + +Let's first see an example of (1), where we set the values of two output components by returning two values: + +```python +with gr.Blocks() as demo: + food_box = gr.Number(value=10, label="Food Count") + status_box = gr.Textbox() + def eat(food): + if food > 0: + return food - 1, "full" + else: + return 0, "hungry" + gr.Button("EAT").click( + fn=eat, + inputs=food_box, + outputs=[food_box, status_box] + ) +``` + +Above, each return statement returns two values corresponding to `food_box` and `status_box`, respectively. + +Instead of returning a list of values corresponding to each output component in order, you can also return a dictionary, with the key corresponding to the output component and the value as the new value. This also allows you to skip updating some output components. + +```python +with gr.Blocks() as demo: + food_box = gr.Number(value=10, label="Food Count") + status_box = gr.Textbox() + def eat(food): + if food > 0: + return {food_box: food - 1, status_box: "full"} + else: + return {status_box: "hungry"} + gr.Button("EAT").click( + fn=eat, + inputs=food_box, + outputs=[food_box, status_box] + ) +``` + +Notice how when there is no food, we only update the `status_box` element. We skipped updating the `food_box` component. + +Dictionary returns are helpful when an event listener affects many components on return, or conditionally affects outputs and not others. + +Keep in mind that with dictionary returns, we still need to specify the possible outputs in the event listener. + +## Updating Component Configurations + +The return value of an event listener function is usually the updated value of the corresponding output Component. Sometimes we want to update the configuration of the Component as well, such as the visibility. In this case, we return a new Component, setting the properties we want to change. 
+ +$code_blocks_essay_simple +$demo_blocks_essay_simple + +See how we can configure the Textbox itself through a new `gr.Textbox()` method. The `value=` argument can still be used to update the value along with Component configuration. Any arguments we do not set will use their previous values. + +## Running Events Consecutively + +You can also run events consecutively by using the `then` method of an event listener. This will run an event after the previous event has finished running. This is useful for running events that update components in multiple steps. + +For example, in the chatbot example below, we first update the chatbot with the user message immediately, and then update the chatbot with the computer response after a simulated delay. + +$code_chatbot_consecutive +$demo_chatbot_consecutive + +The `.then()` method of an event listener executes the subsequent event regardless of whether the previous event raised any errors. If you'd like to only run subsequent events if the previous event executed successfully, use the `.success()` method, which takes the same arguments as `.then()`. + +## Running Events Continuously + +You can run events on a fixed schedule using the `every` parameter of the event listener. This will run the event +`every` number of seconds while the client connection is open. If the connection is closed, the event will stop running after the following iteration. +Note that this does not take into account the runtime of the event itself. So a function +with a 1 second runtime running with `every=5`, would actually run every 6 seconds. + +Here is an example of a sine curve that updates every second! + +$code_sine_curve +$demo_sine_curve + +## Gathering Event Data + +You can gather specific data about an event by adding the associated event data class as a type hint to an argument in the event listener function. + +For example, event data for `.select()` can be type hinted by a `gradio.SelectData` argument. 
This event is triggered when a user selects some part of the triggering component, and the event data includes information about what the user specifically selected. If a user selected a specific word in a `Textbox`, a specific image in a `Gallery`, or a specific cell in a `DataFrame`, the event data argument would contain information about the specific selection. + +In the 2 player tic-tac-toe demo below, a user can select a cell in the `DataFrame` to make a move. The event data argument contains information about the specific cell that was selected. We can first check to see if the cell is empty, and then update the cell with the user's move. + +$code_tictactoe +$demo_tictactoe + +## Binding Multiple Triggers to a Function + +Often times, you may want to bind multiple triggers to the same function. For example, you may want to allow a user to click a submit button, or press enter to submit a form. You can do this using the `gr.on` method and passing a list of triggers to the `trigger`. + +$code_on_listener_basic +$demo_on_listener_basic + +You can use decorator syntax as well: + +$code_on_listener_decorator + +You can use `gr.on` to create "live" events by binding to the change event of all components. If you do not specify any triggers, the function will automatically bind to the change event of all input components. + +$code_on_listener_live +$demo_on_listener_live + +You can follow `gr.on` with `.then`, just like any regular event listener. This handy method should save you from having to write a lot of repetitive code! 
\ No newline at end of file diff --git a/testbed/gradio-app__gradio/guides/03_building-with-blocks/02_controlling-layout.md b/testbed/gradio-app__gradio/guides/03_building-with-blocks/02_controlling-layout.md new file mode 100644 index 0000000000000000000000000000000000000000..ac91c523e947c79c7e209e81556e4c0cba50c8fa --- /dev/null +++ b/testbed/gradio-app__gradio/guides/03_building-with-blocks/02_controlling-layout.md @@ -0,0 +1,95 @@ +# Controlling Layout + +By default, Components in Blocks are arranged vertically. Let's take a look at how we can rearrange Components. Under the hood, this layout structure uses the [flexbox model of web development](https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Flexible_Box_Layout/Basic_Concepts_of_Flexbox). + +## Rows + +Elements within a `with gr.Row` clause will all be displayed horizontally. For example, to display two Buttons side by side: + +```python +with gr.Blocks() as demo: + with gr.Row(): + btn1 = gr.Button("Button 1") + btn2 = gr.Button("Button 2") +``` + +To make every element in a Row have the same height, use the `equal_height` argument of the `style` method. + +```python +with gr.Blocks() as demo: + with gr.Row(equal_height=True): + textbox = gr.Textbox() + btn2 = gr.Button("Button 2") +``` + +The widths of elements in a Row can be controlled via a combination of `scale` and `min_width` arguments that are present in every Component. + +- `scale` is an integer that defines how an element will take up space in a Row. If scale is set to `0`, an element will not expand to take up space. If scale is set to `1` or greater, the element will expand. Multiple elements in a row will expand proportional to their scale. 
Below, `btn1` will expand twice as much as `btn2`, while `btn0` will not expand at all: + +```python +with gr.Blocks() as demo: + with gr.Row(): + btn0 = gr.Button("Button 0", scale=0) + btn1 = gr.Button("Button 1", scale=1) + btn2 = gr.Button("Button 2", scale=2) +``` + +- `min_width` will set the minimum width the element will take. The Row will wrap if there isn't sufficient space to satisfy all `min_width` values. + +Learn more about Rows in the [docs](https://gradio.app/docs/#row). + +## Columns and Nesting + +Components within a Column will be placed vertically atop each other. Since the vertical layout is the default layout for Blocks apps anyway, to be useful, Columns are usually nested within Rows. For example: + +$code_rows_and_columns +$demo_rows_and_columns + +See how the first column has two Textboxes arranged vertically. The second column has an Image and Button arranged vertically. Notice how the relative widths of the two columns is set by the `scale` parameter. The column with twice the `scale` value takes up twice the width. + +Learn more about Columns in the [docs](https://gradio.app/docs/#column). + +## Tabs and Accordions + +You can also create Tabs using the `with gr.Tab('tab_name'):` clause. Any component created inside of a `with gr.Tab('tab_name'):` context appears in that tab. Consecutive Tab clauses are grouped together so that a single tab can be selected at one time, and only the components within that Tab's context are shown. + +For example: + +$code_blocks_flipper +$demo_blocks_flipper + +Also note the `gr.Accordion('label')` in this example. The Accordion is a layout that can be toggled open or closed. Like `Tabs`, it is a layout element that can selectively hide or show content. Any components that are defined inside of a `with gr.Accordion('label'):` will be hidden or shown when the accordion's toggle icon is clicked. 
+ +Learn more about [Tabs](https://gradio.app/docs/#tab) and [Accordions](https://gradio.app/docs/#accordion) in the docs. + +## Visibility + +Both Components and Layout elements have a `visible` argument that can be set initially and also updated. Setting `gr.Column(visible=...)` on a Column can be used to show or hide a set of Components. + +$code_blocks_form +$demo_blocks_form + +## Variable Number of Outputs + +By adjusting the visibility of components in a dynamic way, it is possible to create +demos with Gradio that support a _variable number of outputs_. Here's a very simple example +where the number of output textboxes is controlled by an input slider: + +$code_variable_outputs +$demo_variable_outputs + +## Defining and Rendering Components Separately + +In some cases, you might want to define components before you actually render them in your UI. For instance, you might want to show an examples section using `gr.Examples` above the corresponding `gr.Textbox` input. Since `gr.Examples` requires as a parameter the input component object, you will need to first define the input component, but then render it later, after you have defined the `gr.Examples` object. + +The solution to this is to define the `gr.Textbox` outside of the `gr.Blocks()` scope and use the component's `.render()` method wherever you'd like it placed in the UI. 
+ +Here's a full code example: + +```python +input_textbox = gr.Textbox() + +with gr.Blocks() as demo: + gr.Examples(["hello", "bonjour", "merhaba"], input_textbox) + input_textbox.render() +``` diff --git a/testbed/gradio-app__gradio/guides/03_building-with-blocks/03_state-in-blocks.md b/testbed/gradio-app__gradio/guides/03_building-with-blocks/03_state-in-blocks.md new file mode 100644 index 0000000000000000000000000000000000000000..310408f07eb0e0c9f0a2936eae1ec9cd689277e7 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/03_building-with-blocks/03_state-in-blocks.md @@ -0,0 +1,30 @@ +# State in Blocks + +We covered [State in Interfaces](https://gradio.app/interface-state), this guide takes a look at state in Blocks, which works mostly the same. + +## Global State + +Global state in Blocks works the same as in Interface. Any variable created outside a function call is a reference shared between all users. + +## Session State + +Gradio supports session **state**, where data persists across multiple submits within a page session, in Blocks apps as well. To reiterate, session data is _not_ shared between different users of your model. To store data in a session state, you need to do three things: + +1. Create a `gr.State()` object. If there is a default value to this stateful object, pass that into the constructor. +2. In the event listener, put the `State` object as an input and output. +3. In the event listener function, add the variable to the input parameters and the return value. + +Let's take a look at a game of hangman. + +$code_hangman +$demo_hangman + +Let's see how we do each of the 3 steps listed above in this game: + +1. We store the used letters in `used_letters_var`. In the constructor of `State`, we set the initial value of this to `[]`, an empty list. +2. In `btn.click()`, we have a reference to `used_letters_var` in both the inputs and outputs. +3. 
In `guess_letter`, we pass the value of this `State` to `used_letters`, and then return an updated value of this `State` in the return statement. + +With more complex apps, you will likely have many State variables storing session state in a single Blocks app. + +Learn more about `State` in the [docs](https://gradio.app/docs#state). diff --git a/testbed/gradio-app__gradio/guides/03_building-with-blocks/04_custom-CSS-and-JS.md b/testbed/gradio-app__gradio/guides/03_building-with-blocks/04_custom-CSS-and-JS.md new file mode 100644 index 0000000000000000000000000000000000000000..3357dec23c8be857d79a99ef6274acd5af9b9a2d --- /dev/null +++ b/testbed/gradio-app__gradio/guides/03_building-with-blocks/04_custom-CSS-and-JS.md @@ -0,0 +1,58 @@ +# Custom JS and CSS + +This guide covers how to style Blocks with more flexibility, as well as adding Javascript code to event listeners. + +**Warning**: The use of query selectors in custom JS and CSS is _not_ guaranteed to work across Gradio versions as the Gradio HTML DOM may change. We recommend using query selectors sparingly. + +## Custom CSS + +Gradio themes are the easiest way to customize the look and feel of your app. You can choose from a variety of themes, or create your own. To do so, pass the `theme=` kwarg to the `Blocks` constructor. For example: + +```python +with gr.Blocks(theme=gr.themes.Glass()): + ... +``` + +Gradio comes with a set of prebuilt themes which you can load from `gr.themes.*`. You can extend these themes or create your own themes from scratch - see the [Theming guide](/guides/theming-guide) for more details. + +For additional styling ability, you can pass any CSS to your app using the `css=` kwarg. + +The base class for the Gradio app is `gradio-container`, so here's an example that changes the background color of the Gradio app: + +```python +with gr.Blocks(css=".gradio-container {background-color: red}") as demo: + ... 
+``` + +If you'd like to reference external files in your css, preface the file path (which can be a relative or absolute path) with `"file="`, for example: + +```python +with gr.Blocks(css=".gradio-container {background: url('file=clouds.jpg')}") as demo: + ... +``` + +You can also pass the filepath to a CSS file to the `css` argument. + +## The `elem_id` and `elem_classes` Arguments + +You can use `elem_id` to add an HTML element `id` to any component, and `elem_classes` to add a class or list of classes. This will allow you to select elements more easily with CSS. This approach is also more likely to be stable across Gradio versions as built-in class names or ids may change (however, as mentioned in the warning above, we cannot guarantee complete compatibility between Gradio versions if you use custom CSS as the DOM elements may themselves change). + +```python +css = """ +#warning {background-color: #FFCCCB} +.feedback textarea {font-size: 24px !important} +""" + +with gr.Blocks(css=css) as demo: + box1 = gr.Textbox(value="Good Job", elem_classes="feedback") + box2 = gr.Textbox(value="Failure", elem_id="warning", elem_classes="feedback") +``` + +The CSS `#warning` ruleset will only target the second Textbox, while the `.feedback` ruleset will target both. Note that when targeting classes, you might need to put the `!important` selector to override the default Gradio styles. + +## Custom JS + +Event listeners have a `_js` argument that can take a Javascript function as a string and treat it just like a Python event listener function. You can pass both a Javascript function and a Python function (in which case the Javascript function is run first) or only Javascript (and set the Python `fn` to `None`). 
Take a look at the code below: + +$code_blocks_js_methods +$demo_blocks_js_methods diff --git a/testbed/gradio-app__gradio/guides/03_building-with-blocks/05_using-blocks-like-functions.md b/testbed/gradio-app__gradio/guides/03_building-with-blocks/05_using-blocks-like-functions.md new file mode 100644 index 0000000000000000000000000000000000000000..29062ca456c1008c1a678d28f7fe7d64f5c943ca --- /dev/null +++ b/testbed/gradio-app__gradio/guides/03_building-with-blocks/05_using-blocks-like-functions.md @@ -0,0 +1,90 @@ +# Using Gradio Blocks Like Functions + +Tags: TRANSLATION, HUB, SPACES + +**Prerequisite**: This Guide builds on the Blocks Introduction. Make sure to [read that guide first](https://gradio.app/guides/quickstart/#blocks-more-flexibility-and-control). + +## Introduction + +Did you know that apart from being a full-stack machine learning demo, a Gradio Blocks app is also a regular-old python function!? + +This means that if you have a gradio Blocks (or Interface) app called `demo`, you can use `demo` like you would any python function. + +So doing something like `output = demo("Hello", "friend")` will run the first event defined in `demo` on the inputs "Hello" and "friend" and store it +in the variable `output`. + +If I put you to sleep 🥱, please bear with me! By using apps like functions, you can seamlessly compose Gradio apps. +The following section will show how. + +## Treating Blocks like functions + +Let's say we have the following demo that translates english text to german text. + +$code_english_translator + +I already went ahead and hosted it in Hugging Face spaces at [gradio/english_translator](https://huggingface.co/spaces/gradio/english_translator). + +You can see the demo below as well: + +$demo_english_translator + +Now, let's say you have an app that generates english text, but you wanted to additionally generate german text. + +You could either: + +1. Copy the source code of my english-to-german translation and paste it in your app. + +2. 
Load my english-to-german translation in your app and treat it like a normal python function. + +Option 1 technically always works, but it often introduces unwanted complexity. + +Option 2 lets you borrow the functionality you want without tightly coupling our apps. + +All you have to do is call the `Blocks.load` class method in your source file. +After that, you can use my translation app like a regular python function! + +The following code snippet and demo shows how to use `Blocks.load`. + +Note that the variable `english_translator` is my english to german app, but its used in `generate_text` like a regular function. + +$code_generate_english_german + +$demo_generate_english_german + +## How to control which function in the app to use + +If the app you are loading defines more than one function, you can specify which function to use +with the `fn_index` and `api_name` parameters. + +In the code for our english to german demo, you'll see the following line: + +```python +translate_btn.click(translate, inputs=english, outputs=german, api_name="translate-to-german") +``` + +The `api_name` gives this function a unique name in our app. You can use this name to tell gradio which +function in the upstream space you want to use: + +```python +english_generator(text, api_name="translate-to-german")[0]["generated_text"] +``` + +You can also use the `fn_index` parameter. +Imagine my app also defined an english to spanish translation function. +In order to use it in our text generation app, we would use the following code: + +```python +english_generator(text, fn_index=1)[0]["generated_text"] +``` + +Functions in gradio spaces are zero-indexed, so since the spanish translator would be the second function in my space, +you would use index 1. + +## Parting Remarks + +We showed how treating a Blocks app like a regular python helps you compose functionality across different apps. 
+Any Blocks app can be treated like a function, but a powerful pattern is to `load` an app hosted on +[Hugging Face Spaces](https://huggingface.co/spaces) prior to treating it like a function in your own app. +You can also load models hosted on the [Hugging Face Model Hub](https://huggingface.co/models) - see the [Using Hugging Face Integrations](/using_hugging_face_integrations) guide for an example. + +### Happy building! ⚒️ diff --git a/testbed/gradio-app__gradio/guides/04_chatbots/01_creating-a-chatbot-fast.md b/testbed/gradio-app__gradio/guides/04_chatbots/01_creating-a-chatbot-fast.md new file mode 100644 index 0000000000000000000000000000000000000000..adb189d82f02b8451dc9ced311c4ac9d57e5afac --- /dev/null +++ b/testbed/gradio-app__gradio/guides/04_chatbots/01_creating-a-chatbot-fast.md @@ -0,0 +1,285 @@ +# How to Create a Chatbot with Gradio + +Tags: NLP, TEXT, CHAT + +## Introduction + +Chatbots are a popular application of large language models. Using `gradio`, you can easily build a demo of your chatbot model and share that with your users, or try it yourself using an intuitive chatbot UI. + +This tutorial uses `gr.ChatInterface()`, which is a high-level abstraction that allows you to create your chatbot UI fast, often with a single line of code. The chatbot interface that we create will look something like this: + +$demo_chatinterface_streaming_echo + +We'll start with a couple of simple examples, and then show how to use `gr.ChatInterface()` with real language models from several popular APIs and libraries, including `langchain`, `openai`, and Hugging Face. + +**Prerequisites**: please make sure you are using the **latest version** version of Gradio: + +```bash +$ pip install --upgrade gradio +``` + +## Defining a chat function + +When working with `gr.ChatInterface()`, the first thing you should do is define your chat function. 
Your chat function should take two arguments: `message` and then `history` (the arguments can be named anything, but must be in this order). + +- `message`: a `str` representing the user's input. +- `history`: a `list` of `list` representing the conversations up until that point. Each inner list consists of two `str` representing a pair: `[user input, bot response]`. + +Your function should return a single string response, which is the bot's response to the particular user input `message`. Your function can take into account the `history` of messages, as well as the current message. + +Let's take a look at a few examples. + +## Example: a chatbot that responds yes or no + +Let's write a chat function that responds `Yes` or `No` randomly. + +Here's our chat function: + +```python +import random + +def random_response(message, history): + return random.choice(["Yes", "No"]) +``` + +Now, we can plug this into `gr.ChatInterface()` and call the `.launch()` method to create the web interface: + +```python +import gradio as gr + +gr.ChatInterface(random_response).launch() +``` + +That's it! Here's our running demo, try it out: + +$demo_chatinterface_random_response + +## Another example using the user's input and history + +Of course, the previous example was very simplistic, it didn't even take user input or the previous history into account! Here's another simple example showing how to incorporate a user's input as well as the history. + +```python +import random +import gradio as gr + +def alternatingly_agree(message, history): + if len(history) % 2 == 0: + return f"Yes, I do think that '{message}'" + else: + return "I don't think so" + +gr.ChatInterface(alternatingly_agree).launch() +``` + +## Streaming chatbots + +If in your chat function, you use `yield` to generate a sequence of responses, you'll end up with a streaming chatbot. It's that simple! 
+ +```python +import time +import gradio as gr + +def slow_echo(message, history): + for i in range(len(message)): + time.sleep(0.3) + yield "You typed: " + message[: i+1] + +gr.ChatInterface(slow_echo).queue().launch() +``` + +Notice that we've [enabled queuing](/guides/key-features#queuing), which is required to use generator functions. While the response is streaming, the "Submit" button turns into a "Stop" button that can be used to stop the generator function. You can customize the appearance and behavior of the "Stop" button using the `stop_btn` parameter. + +## Customizing your chatbot + +If you're familiar with Gradio's `Interface` class, the `gr.ChatInterface` includes many of the same arguments that you can use to customize the look and feel of your Chatbot. For example, you can: + +- add a title and description above your chatbot using `title` and `description` arguments. +- add a theme or custom css using `theme` and `css` arguments respectively. +- add `examples` and even enable `cache_examples`, which make it easier for users to try it out . +- You can change the text or disable each of the buttons that appear in the chatbot interface: `submit_btn`, `retry_btn`, `undo_btn`, `clear_btn`. + +If you want to customize the `gr.Chatbot` or `gr.Textbox` that compose the `ChatInterface`, then you can pass in your own chatbot or textbox as well. Here's an example of how we can use these parameters: + +```python +import gradio as gr + +def yes_man(message, history): + if message.endswith("?"): + return "Yes" + else: + return "Ask me anything!" 
+ +gr.ChatInterface( + yes_man, + chatbot=gr.Chatbot(height=300), + textbox=gr.Textbox(placeholder="Ask me a yes or no question", container=False, scale=7), + title="Yes Man", + description="Ask Yes Man any question", + theme="soft", + examples=["Hello", "Am I cool?", "Are tomatoes vegetables?"], + cache_examples=True, + retry_btn=None, + undo_btn="Delete Previous", + clear_btn="Clear", +).launch() +``` + +## Additional Inputs + +You may want to add additional parameters to your chatbot and expose them to your users through the Chatbot UI. For example, suppose you want to add a textbox for a system prompt, or a slider that sets the number of tokens in the chatbot's response. The `ChatInterface` class supports an `additional_inputs` parameter which can be used to add additional input components. + +The `additional_inputs` parameters accepts a component or a list of components. You can pass the component instances directly, or use their string shortcuts (e.g. `"textbox"` instead of `gr.Textbox()`). If you pass in component instances, and they have _not_ already been rendered, then the components will appear underneath the chatbot (and any examples) within a `gr.Accordion()`. You can set the label of this accordion using the `additional_inputs_accordion_name` parameter. + +Here's a complete example: + +$code_chatinterface_system_prompt + +If the components you pass into the `additional_inputs` have already been rendered in a parent `gr.Blocks()`, then they will _not_ be re-rendered in the accordion. This provides flexibility in deciding where to lay out the input components. In the example below, we position the `gr.Textbox()` on top of the Chatbot UI, while keeping the slider underneath. + +```python +import gradio as gr +import time + +def echo(message, history, system_prompt, tokens): + response = f"System prompt: {system_prompt}\n Message: {message}." 
+ + for i in range(min(len(response), int(tokens))): + time.sleep(0.05) + yield response[: i+1] + +with gr.Blocks() as demo: + system_prompt = gr.Textbox("You are helpful AI.", label="System Prompt") + slider = gr.Slider(10, 100, render=False) + + gr.ChatInterface( + echo, additional_inputs=[system_prompt, slider] + ) + +demo.queue().launch() +``` + +If you need to create something even more custom, then it's best to construct the chatbot UI using the low-level `gr.Blocks()` API. We have [a dedicated guide for that here](/guides/creating-a-custom-chatbot-with-blocks). + +## Using your chatbot via an API + +Once you've built your Gradio chatbot and are hosting it on [Hugging Face Spaces](https://hf.space) or somewhere else, then you can query it with a simple API at the `/chat` endpoint. The endpoint just expects the user's message (and potentially additional inputs if you have set any using the `additional_inputs` parameter), and will return the response, internally keeping track of the messages sent so far. + +[](https://github.com/gradio-app/gradio/assets/1778297/7b10d6db-6476-4e2e-bebd-ecda802c3b8f) + +To use the endpoint, you should use either the [Gradio Python Client](/guides/getting-started-with-the-python-client) or the [Gradio JS client](/guides/getting-started-with-the-js-client). + +## A `langchain` example + +Now, let's actually use the `gr.ChatInterface` with some real large language models. We'll start by using `langchain` on top of `openai` to build a general-purpose streaming chatbot application in 19 lines of code. You'll need to have an OpenAI key for this example (keep reading for the free, open-source equivalent!) + +```python +from langchain.chat_models import ChatOpenAI +from langchain.schema import AIMessage, HumanMessage +import openai +import gradio as gr + +os.environ["OPENAI_API_KEY"] = "sk-..." 
# Replace with your key + +llm = ChatOpenAI(temperature=1.0, model='gpt-3.5-turbo-0613') + +def predict(message, history): + history_langchain_format = [] + for human, ai in history: + history_langchain_format.append(HumanMessage(content=human)) + history_langchain_format.append(AIMessage(content=ai)) + history_langchain_format.append(HumanMessage(content=message)) + gpt_response = llm(history_langchain_format) + return gpt_response.content + +gr.ChatInterface(predict).launch() +``` + +## A streaming example using `openai` + +Of course, we could also use the `openai` library directly. Here's a similar example, but this time with streaming results as well: + +```python +import openai +import gradio as gr + +openai.api_key = "sk-..."  # Replace with your key + +def predict(message, history): + history_openai_format = [] + for human, assistant in history: + history_openai_format.append({"role": "user", "content": human }) + history_openai_format.append({"role": "assistant", "content":assistant}) + history_openai_format.append({"role": "user", "content": message}) + + response = openai.ChatCompletion.create( + model='gpt-3.5-turbo', + messages= history_openai_format, + temperature=1.0, + stream=True + ) + + partial_message = "" + for chunk in response: + if len(chunk['choices'][0]['delta']) != 0: + partial_message = partial_message + chunk['choices'][0]['delta']['content'] + yield partial_message + +gr.ChatInterface(predict).queue().launch() +``` + +## Example using a local, open-source LLM with Hugging Face + +Of course, in many cases you want to run a chatbot locally. Here's the equivalent example using Together's RedPajama model, from Hugging Face (this requires you to have a GPU with CUDA). 
+ +```python +import gradio as gr +import torch +from transformers import AutoModelForCausalLM, AutoTokenizer, StoppingCriteria, StoppingCriteriaList, TextIteratorStreamer +from threading import Thread + +tokenizer = AutoTokenizer.from_pretrained("togethercomputer/RedPajama-INCITE-Chat-3B-v1") +model = AutoModelForCausalLM.from_pretrained("togethercomputer/RedPajama-INCITE-Chat-3B-v1", torch_dtype=torch.float16) +model = model.to('cuda:0') + +class StopOnTokens(StoppingCriteria): + def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool: + stop_ids = [29, 0] + for stop_id in stop_ids: + if input_ids[0][-1] == stop_id: + return True + return False + +def predict(message, history): + + history_transformer_format = history + [[message, ""]] + stop = StopOnTokens() + + messages = "".join(["".join(["\n:"+item[0], "\n:"+item[1]]) #curr_system_message + + for item in history_transformer_format]) + + model_inputs = tokenizer([messages], return_tensors="pt").to("cuda") + streamer = TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True) + generate_kwargs = dict( + model_inputs, + streamer=streamer, + max_new_tokens=1024, + do_sample=True, + top_p=0.95, + top_k=1000, + temperature=1.0, + num_beams=1, + stopping_criteria=StoppingCriteriaList([stop]) + ) + t = Thread(target=model.generate, kwargs=generate_kwargs) + t.start() + + partial_message = "" + for new_token in streamer: + if new_token != '<': + partial_message += new_token + yield partial_message + + +gr.ChatInterface(predict).queue().launch() +``` + +With those examples, you should be all set to create your own Gradio Chatbot demos soon! For building even more custom Chatbot applications, check out [a dedicated guide](/guides/creating-a-custom-chatbot-with-blocks) using the low-level `gr.Blocks()` API. 
diff --git a/testbed/gradio-app__gradio/guides/04_chatbots/02_creating-a-custom-chatbot-with-blocks.md b/testbed/gradio-app__gradio/guides/04_chatbots/02_creating-a-custom-chatbot-with-blocks.md new file mode 100644 index 0000000000000000000000000000000000000000..5399936dc5a99012a2917b86cf0499b6d241cd28 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/04_chatbots/02_creating-a-custom-chatbot-with-blocks.md @@ -0,0 +1,110 @@ +# How to Create a Custom Chatbot with Gradio Blocks + +Tags: NLP, TEXT, CHAT +Related spaces: https://huggingface.co/spaces/gradio/chatbot_streaming, https://huggingface.co/spaces/project-baize/Baize-7B, + +## Introduction + +**Important Note**: if you are getting started, we recommend using the `gr.ChatInterface` to create chatbots -- its a high-level abstraction that makes it possible to create beautiful chatbot applications fast, often with a single line of code. [Read more about it here](/guides/creating-a-chatbot-fast). + +This tutorial will show how to make chatbot UIs from scratch with Gradio's low-level Blocks API. This will give you full control over your Chatbot UI. You'll start by first creating a a simple chatbot to display text, a second one to stream text responses, and finally a chatbot that can handle media files as well. The chatbot interface that we create will look something like this: + +$demo_chatbot_streaming + +**Prerequisite**: We'll be using the `gradio.Blocks` class to build our Chatbot demo. +You can [read the Guide to Blocks first](https://gradio.app/quickstart/#blocks-more-flexibility-and-control) if you are not already familiar with it. Also please make sure you are using the **latest version** version of Gradio: `pip install --upgrade gradio`. + +## A Simple Chatbot Demo + +Let's start with recreating the simple demo above. As you may have noticed, our bot simply randomly responds "How are you?", "I love you", or "I'm very hungry" to any input. 
Here's the code to create this with Gradio: + +$code_chatbot_simple + +There are three Gradio components here: + +- A `Chatbot`, whose value stores the entire history of the conversation, as a list of response pairs between the user and bot. +- A `Textbox` where the user can type their message, and then hit enter/submit to trigger the chatbot response +- A `ClearButton` button to clear the Textbox and entire Chatbot history + +We have a single function, `respond()`, which takes in the entire history of the chatbot, appends a random message, waits 1 second, and then returns the updated chat history. The `respond()` function also clears the textbox when it returns. + +Of course, in practice, you would replace `respond()` with your own more complex function, which might call a pretrained model or an API, to generate a response. + +$demo_chatbot_simple + +## Add Streaming to your Chatbot + +There are several ways we can improve the user experience of the chatbot above. First, we can stream responses so the user doesn't have to wait as long for a message to be generated. Second, we can have the user message appear immediately in the chat history, while the chatbot's response is being generated. Here's the code to achieve that: + +$code_chatbot_streaming + +You'll notice that when a user submits their message, we now _chain_ three events with `.then()`: + +1. The first method `user()` updates the chatbot with the user message and clears the input field. This method also makes the input field non interactive so that the user can't send another message while the chatbot is responding. Because we want this to happen instantly, we set `queue=False`, which would skip any queue had it been enabled. The chatbot's history is appended with `(user_message, None)`, the `None` signifying that the bot has not responded. + +2. The second method, `bot()` updates the chatbot history with the bot's response. 
Instead of creating a new message, we just replace the previously-created `None` message with the bot's response. Finally, we construct the message character by character and `yield` the intermediate outputs as they are being constructed. Gradio automatically turns any function with the `yield` keyword [into a streaming output interface](/guides/key-features/#iterative-outputs). + +3. The third method makes the input field interactive again so that users can send another message to the bot. + +Of course, in practice, you would replace `bot()` with your own more complex function, which might call a pretrained model or an API, to generate a response. + +Finally, we enable queuing by running `demo.queue()`, which is required for streaming intermediate outputs. You can try the improved chatbot by scrolling to the demo at the top of this page. + +## Liking / Disliking Chat Messages + +Once you've created your `gr.Chatbot`, you can add the ability for users to like or dislike messages. This can be useful if you would like users to vote on a bot's responses or flag inappropriate results. + +To add this functionality to your Chatbot, simply attach a `.like()` event to your Chatbot. A chatbot that has the `.like()` event will automatically feature a thumbs-up icon and a thumbs-down icon next to every bot message. + +The `.like()` method requires you to pass in a function that is called when a user clicks on these icons. In your function, you should have an argument whose type is `gr.LikeData`. Gradio will automatically supply the parameter to this argument with an object that contains information about the liked or disliked message. 
Here's a simplistic example of how you can have users like or dislike chat messages: + +```py +import gradio as gr + +def greet(history, input): + return history + [(input, "Hello, " + input)] + +def vote(data: gr.LikeData): + if data.liked: + print("You upvoted this response: " + data.value) + else: + print("You downvoted this response: " + data.value) + + +with gr.Blocks() as demo: + chatbot = gr.Chatbot() + textbox = gr.Textbox() + textbox.submit(greet, [chatbot, textbox], [chatbot]) + chatbot.like(vote, None, None) # Adding this line causes the like/dislike icons to appear in your chatbot + +demo.launch() +``` + +## Adding Markdown, Images, Audio, or Videos + +The `gr.Chatbot` component supports a subset of markdown including bold, italics, and code. For example, we could write a function that responds to a user's message, with a bold **That's cool!**, like this: + +```py +def bot(history): + response = "**That's cool!**" + history[-1][1] = response + return history +``` + +In addition, it can handle media files, such as images, audio, and video. To pass in a media file, we must pass in the file as a tuple of two strings, like this: `(filepath, alt_text)`. The `alt_text` is optional, so you can also just pass in a tuple with a single element `(filepath,)`, like this: + +```python +def add_file(history, file): + history = history + [((file.name,), None)] + return history +``` + +Putting this together, we can create a _multimodal_ chatbot with a textbox for a user to submit text and an file upload button to submit images / audio / video files. The rest of the code looks pretty much the same as before: + +$code_chatbot_multimodal +$demo_chatbot_multimodal + +And you're done! That's all the code you need to build an interface for your chatbot model. 
Finally, we'll end our Guide with some links to Chatbots that are running on Spaces so that you can get an idea of what else is possible: + +- [project-baize/Baize-7B](https://huggingface.co/spaces/project-baize/Baize-7B): A stylized chatbot that allows you to stop generation as well as regenerate responses. +- [MAGAer13/mPLUG-Owl](https://huggingface.co/spaces/MAGAer13/mPLUG-Owl): A multimodal chatbot that allows you to upvote and downvote responses. diff --git a/testbed/gradio-app__gradio/guides/04_chatbots/03_creating-a-discord-bot-from-a-gradio-app.md b/testbed/gradio-app__gradio/guides/04_chatbots/03_creating-a-discord-bot-from-a-gradio-app.md new file mode 100644 index 0000000000000000000000000000000000000000..3a0569e19471a1a9a1db04b7987fa0ad9fb6da0b --- /dev/null +++ b/testbed/gradio-app__gradio/guides/04_chatbots/03_creating-a-discord-bot-from-a-gradio-app.md @@ -0,0 +1,137 @@ +# 🚀 Creating Discord Bots from Gradio Apps 🚀 + +Tags: NLP, TEXT, CHAT + +We're excited to announce that Gradio can now automatically create a discord bot from a deployed app! 🤖 + +Discord is a popular communication platform that allows users to chat and interact with each other in real-time. By turning your Gradio app into a Discord bot, you can bring cutting edge AI to your discord server and give your community a whole new way to interact. + +## 💻 How does it work? 💻 + +With `gradio_client` version `0.3.0`, any gradio `ChatInterface` app on the internet can automatically be deployed as a discord bot via the `deploy_discord` method of the `Client` class. + +Technically, any gradio app that exposes an api route that takes in a single string and outputs a single string can be deployed to discord. In this guide, we will focus on `gr.ChatInterface` as those apps naturally lend themselves to discord's chat functionality. + +## 🛠️ Requirements 🛠️ + +Make sure you have the latest `gradio_client` and `gradio` versions installed. 
+ +```bash +pip install gradio_client>=0.3.0 gradio>=3.38.0 +``` + +Also, make sure you have a [Hugging Face account](https://huggingface.co/) and a [write access token](https://huggingface.co/docs/hub/security-tokens). + +⚠️ Tip ⚠️: Make sure you login to the Hugging Face Hub by running `huggingface-cli login`. This will let you skip passing your token in all subsequent commands in this guide. + +## 🏃‍♀️ Quickstart 🏃‍♀️ + +### Step 1: Implementing our chatbot + +Let's build a very simple Chatbot using `ChatInterface` that simply repeats the user message. Write the following code into an `app.py` + +```python +import gradio as gr + +def slow_echo(message, history): + return message + +demo = gr.ChatInterface(slow_echo).queue().launch() +``` + +### Step 2: Deploying our App + +In order to create a discord bot for our app, it must be accessible over the internet. In this guide, we will use the `gradio deploy` command to deploy our chatbot to Hugging Face spaces from the command line. Run the following command. + +```bash +gradio deploy --title echo-chatbot --app-file app.py +``` + +This command will ask you some questions, e.g. requested hardware, requirements, but the default values will suffice for this guide. +Note the URL of the space that was created. Mine is https://huggingface.co/spaces/freddyaboulton/echo-chatbot + +### Step 3: Creating a Discord Bot + +Turning our space into a discord bot is also a one-liner thanks to the `gradio deploy-discord`. Run the following command: + +```bash +gradio deploy-discord --src freddyaboulton/echo-chatbot +``` + +❗️ Advanced ❗️: If you already have a discord bot token you can pass it to the `deploy-discord` command. Don't worry, if you don't have one yet! + +```bash +gradio deploy-discord --src freddyaboulton/echo-chatbot --discord-bot-token +``` + +Note the URL that gets printed out to the console. 
Mine is https://huggingface.co/spaces/freddyaboulton/echo-chatbot-gradio-discord-bot + +### Step 4: Getting a Discord Bot Token + +If you didn't have a discord bot token for step 3, go to the URL that got printed in the console and follow the instructions there. +Once you obtain a token, run the command again but this time pass in the token: + +```bash +gradio deploy-discord --src freddyaboulton/echo-chatbot --discord-bot-token +``` + +### Step 5: Add the bot to your server + +Visit the space of your discord bot. You should see "Add this bot to your server by clicking this link:" followed by a URL. Go to that URL and add the bot to your server! + +### Step 6: Use your bot! + +By default the bot can be called by starting a message with `/chat`, e.g. `/chat `. + +⚠️ Tip ⚠️: If either of the deployed spaces goes to sleep, the bot will stop working. By default, spaces go to sleep after 48 hours of inactivity. You can upgrade the hardware of your space to prevent it from going to sleep. See this [guide](https://huggingface.co/docs/hub/spaces-gpus#using-gpu-spaces) for more information. + + + +### Using the `gradio_client.Client` Class + +You can also create a discord bot from a deployed gradio app with python. + +```python +import gradio_client as grc +grc.Client("freddyaboulton/echo-chatbot").deploy_discord() +``` + +## 🦾 Using State of The Art LLMs 🦾 + +We have created an organization on Hugging Face called [gradio-discord-bots](https://huggingface.co/gradio-discord-bots) containing several template spaces that explain how to deploy state of the art LLMs powered by gradio as discord bots. + +The easiest way to get started is by deploying Meta's Llama 2 LLM with 70 billion parameter. Simply go to this [space](https://huggingface.co/spaces/gradio-discord-bots/Llama-2-70b-chat-hf) and follow the instructions. + +The deployment can be done in one line! 
🤯 + +```python +import gradio_client as grc +grc.Client("ysharma/Explore_llamav2_with_TGI").deploy_discord(to_id="llama2-70b-discord-bot") +``` + +## 🦜 Additional LLMs 🦜 + +In addition to Meta's 70 billion Llama 2 model, we have prepared template spaces for the following LLMs and deployment options: + +- [gpt-3.5-turbo](https://huggingface.co/spaces/gradio-discord-bots/gpt-35-turbo), powered by openai. Required OpenAI key. +- [falcon-7b-instruct](https://huggingface.co/spaces/gradio-discord-bots/falcon-7b-instruct) powered by Hugging Face Inference Endpoints. +- [Llama-2-13b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/Llama-2-13b-chat-hf) powered by Hugging Face Inference Endpoints. +- [Llama-2-13b-chat-hf](https://huggingface.co/spaces/gradio-discord-bots/llama-2-13b-chat-transformers) powered by Hugging Face transformers. + +To deploy any of these models to discord, simply follow the instructions in the linked space for that model. + +## Deploying non-chat gradio apps to discord + +As mentioned above, you don't need a `gr.ChatInterface` if you want to deploy your gradio app to discord. All that's needed is an api route that takes in a single string and outputs a single string. + +The following code will deploy a space that translates english to german as a discord bot. + +```python +import gradio_client as grc +client = grc.Client("freddyaboulton/english-to-german") +client.deploy_discord(api_names=['german']) +``` + +## Conclusion + +That's it for this guide! We're really excited about this feature. Tag [@Gradio](https://twitter.com/Gradio) on twitter and show us how your discord community interacts with your discord bots. 
diff --git a/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/01_using-hugging-face-integrations.md b/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/01_using-hugging-face-integrations.md new file mode 100644 index 0000000000000000000000000000000000000000..56f8b7466f23004ad5d153e89d82da5e6cd1c862 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/01_using-hugging-face-integrations.md @@ -0,0 +1,136 @@ +# Using Hugging Face Integrations + +Related spaces: https://huggingface.co/spaces/gradio/helsinki_translation_en_es +Tags: HUB, SPACES, EMBED + +Contributed by Omar Sanseviero 🦙 + +## Introduction + +The Hugging Face Hub is a central platform that has over 190,000 [models](https://huggingface.co/models), 32,000 [datasets](https://huggingface.co/datasets) and 40,000 [demos](https://huggingface.co/spaces), also known as Spaces. Although Hugging Face is famous for its 🤗 transformers and diffusers libraries, the Hub also supports dozens of ML libraries, such as PyTorch, TensorFlow, spaCy, and many others across a variety of domains, from computer vision to reinforcement learning. + +Gradio has multiple features that make it extremely easy to leverage existing models and Spaces on the Hub. This guide walks through these features. + +## Using regular inference with `pipeline` + +First, let's build a simple interface that translates text from English to Spanish. Between the over a thousand models shared by the University of Helsinki, there is an [existing model](https://huggingface.co/Helsinki-NLP/opus-mt-en-es), `opus-mt-en-es`, that does precisely this! + +The 🤗 transformers library has a very easy-to-use abstraction, [`pipeline()`](https://huggingface.co/docs/transformers/v4.16.2/en/main_classes/pipelines#transformers.pipeline) that handles most of the complex code to offer a simple API for common tasks. 
By specifying the task and an (optional) model, you can use an existing model with few lines: + +```python +import gradio as gr + +from transformers import pipeline + +pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es") + +def predict(text): + return pipe(text)[0]["translation_text"] + +demo = gr.Interface( + fn=predict, + inputs='text', + outputs='text', +) + +demo.launch() +``` + +But `gradio` actually makes it even easier to convert a `pipeline` to a demo, simply by using the `gradio.Interface.from_pipeline` methods, which skips the need to specify the input and output components: + +```python +from transformers import pipeline +import gradio as gr + +pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es") + +demo = gr.Interface.from_pipeline(pipe) +demo.launch() +``` + +The previous code produces the following interface, which you can try right here in your browser: + + + +## Using Hugging Face Inference API + +Hugging Face has a free service called the [Inference API](https://huggingface.co/inference-api), which allows you to send HTTP requests to models in the Hub. For transformers or diffusers-based models, the API can be 2 to 10 times faster than running the inference yourself. The API is free (rate limited), and you can switch to dedicated [Inference Endpoints](https://huggingface.co/pricing) when you want to use it in production. + +Let's try the same demo as above but using the Inference API instead of loading the model yourself. Given a Hugging Face model supported in the Inference API, Gradio can automatically infer the expected input and output and make the underlying server calls, so you don't have to worry about defining the prediction function. Here is what the code would look like! + +```python +import gradio as gr + +demo = gr.load("Helsinki-NLP/opus-mt-en-es", src="models") + +demo.launch() +``` + +Notice that we just put specify the model name and state that the `src` should be `models` (Hugging Face's Model Hub). 
There is no need to install any dependencies (except `gradio`) since you are not loading the model on your computer. + +You might notice that the first inference takes about 20 seconds. This happens since the Inference API is loading the model in the server. You get some benefits afterward: + +- The inference will be much faster. +- The server caches your requests. +- You get built-in automatic scaling. + +## Hosting your Gradio demos + +[Hugging Face Spaces](https://hf.co/spaces) allows anyone to host their Gradio demos freely, and uploading your Gradio demos take a couple of minutes. You can head to [hf.co/new-space](https://huggingface.co/new-space), select the Gradio SDK, create an `app.py` file, and voila! You have a demo you can share with anyone else. To learn more, read [this guide how to host on Hugging Face Spaces using the website](https://huggingface.co/blog/gradio-spaces). + +Alternatively, you can create a Space programmatically, making use of the [huggingface_hub client library](https://huggingface.co/docs/huggingface_hub/index) library. Here's an example: + +```python +from huggingface_hub import ( + create_repo, + get_full_repo_name, + upload_file, +) +create_repo(name=target_space_name, token=hf_token, repo_type="space", space_sdk="gradio") +repo_name = get_full_repo_name(model_id=target_space_name, token=hf_token) +file_url = upload_file( + path_or_fileobj="file.txt", + path_in_repo="app.py", + repo_id=repo_name, + repo_type="space", + token=hf_token, +) +``` + +Here, `create_repo` creates a gradio repo with the target name under a specific account using that account's Write Token. `repo_name` gets the full repo name of the related repo. Finally `upload_file` uploads a file inside the repo with the name `app.py`. + +## Embedding your Space demo on other websites + +Throughout this guide, you've seen many embedded Gradio demos. You can also do this on own website! The first step is to create a Hugging Face Space with the demo you want to showcase. 
Then, [follow the steps here to embed the Space on your website](/guides/sharing-your-app/#embedding-hosted-spaces). + +## Loading demos from Spaces + +You can also use and remix existing Gradio demos on Hugging Face Spaces. For example, you could take two existing Gradio demos and put them as separate tabs and create a new demo. You can run this new demo locally, or upload it to Spaces, allowing endless possibilities to remix and create new demos! + +Here's an example that does exactly that: + +```python +import gradio as gr + +with gr.Blocks() as demo: + with gr.Tab("Translate to Spanish"): + gr.load("gradio/helsinki_translation_en_es", src="spaces") + with gr.Tab("Translate to French"): + gr.load("abidlabs/en2fr", src="spaces") + +demo.launch() +``` + +Notice that we use `gr.load()`, the same method we used to load models using the Inference API. However, here we specify that the `src` is `spaces` (Hugging Face Spaces). + +## Recap + +That's it! Let's recap the various ways Gradio and Hugging Face work together: + +1. You can convert a `transformers` pipeline into a Gradio demo using `from_pipeline()` +2. You can build a demo around the Inference API without having to load the model easily using `gr.load()` +3. You host your Gradio demo on Hugging Face Spaces, either using the GUI or entirely in Python. +4. You can embed Gradio demos that are hosted on Hugging Face Spaces onto your own website. +5. You can load demos from Hugging Face Spaces to remix and create new Gradio demos using `gr.load()`. 
+ +🤗 diff --git a/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/Gradio-and-Comet.md b/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/Gradio-and-Comet.md new file mode 100644 index 0000000000000000000000000000000000000000..cc809bb3d6d4920779bffde838ea93de06f07a16 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/Gradio-and-Comet.md @@ -0,0 +1,270 @@ +# Using Gradio and Comet + +Tags: COMET, SPACES +Contributed by the Comet team + +## Introduction + +In this guide we will demonstrate some of the ways you can use Gradio with Comet. We will cover the basics of using Comet with Gradio and show you some of the ways that you can leverage Gradio's advanced features such as [Embedding with iFrames](https://www.gradio.app/guides/sharing-your-app/#embedding-with-iframes) and [State](https://www.gradio.app/docs/#state) to build some amazing model evaluation workflows. + +Here is a list of the topics covered in this guide. + +1. Logging Gradio UI's to your Comet Experiments +2. Embedding Gradio Applications directly into your Comet Projects +3. Embedding Hugging Face Spaces directly into your Comet Projects +4. Logging Model Inferences from your Gradio Application to Comet + +## What is Comet? + +[Comet](https://www.comet.com?utm_source=gradio&utm_medium=referral&utm_campaign=gradio-integration&utm_content=gradio-docs) is an MLOps Platform that is designed to help Data Scientists and Teams build better models faster! Comet provides tooling to Track, Explain, Manage, and Monitor your models in a single place! It works with Jupyter Notebooks and Scripts and most importantly it's 100% free! 
+ +## Setup + +First, install the dependencies needed to run these examples + +```shell +pip install comet_ml torch torchvision transformers gradio shap requests Pillow +``` + +Next, you will need to [sign up for a Comet Account](https://www.comet.com/signup?utm_source=gradio&utm_medium=referral&utm_campaign=gradio-integration&utm_content=gradio-docs). Once you have your account set up, [grab your API Key](https://www.comet.com/docs/v2/guides/getting-started/quickstart/#get-an-api-key?utm_source=gradio&utm_medium=referral&utm_campaign=gradio-integration&utm_content=gradio-docs) and configure your Comet credentials + +If you're running these examples as a script, you can either export your credentials as environment variables + +```shell +export COMET_API_KEY="" +export COMET_WORKSPACE="" +export COMET_PROJECT_NAME="" +``` + +or set them in a `.comet.config` file in your working directory. You file should be formatted in the following way. + +```shell +[comet] +api_key= +workspace= +project_name= +``` + +If you are using the provided Colab Notebooks to run these examples, please run the cell with the following snippet before starting the Gradio UI. Running this cell allows you to interactively add your API key to the notebook. + +```python +import comet_ml +comet_ml.init() +``` + +## 1. Logging Gradio UI's to your Comet Experiments + +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/comet-examples/blob/master/integrations/model-evaluation/gradio/notebooks/Gradio_and_Comet.ipynb) + +In this example, we will go over how to log your Gradio Applications to Comet and interact with them using the Gradio Custom Panel. + +Let's start by building a simple Image Classification example using `resnet18`. 
+ +```python +import comet_ml + +import requests +import torch +from PIL import Image +from torchvision import transforms + +torch.hub.download_url_to_file("https://github.com/pytorch/hub/raw/master/images/dog.jpg", "dog.jpg") + +if torch.cuda.is_available(): + device = "cuda" +else: + device = "cpu" + +model = torch.hub.load("pytorch/vision:v0.6.0", "resnet18", pretrained=True).eval() +model = model.to(device) + +# Download human-readable labels for ImageNet. +response = requests.get("https://git.io/JJkYN") +labels = response.text.split("\n") + + +def predict(inp): + inp = Image.fromarray(inp.astype("uint8"), "RGB") + inp = transforms.ToTensor()(inp).unsqueeze(0) + with torch.no_grad(): + prediction = torch.nn.functional.softmax(model(inp.to(device))[0], dim=0) + return {labels[i]: float(prediction[i]) for i in range(1000)} + + +inputs = gr.Image() +outputs = gr.Label(num_top_classes=3) + +io = gr.Interface( + fn=predict, inputs=inputs, outputs=outputs, examples=["dog.jpg"] +) +io.launch(inline=False, share=True) + +experiment = comet_ml.Experiment() +experiment.add_tag("image-classifier") + +io.integrate(comet_ml=experiment) +``` + +The last line in this snippet will log the URL of the Gradio Application to your Comet Experiment. You can find the URL in the Text Tab of your Experiment. + + + +Add the Gradio Panel to your Experiment to interact with your application. + + + +## 2. Embedding Gradio Applications directly into your Comet Projects + + + +If you are permanently hosting your Gradio application, you can embed the UI using the Gradio Panel Extended custom Panel. + +Go to your Comet Project page, and head over to the Panels tab. Click the `+ Add` button to bring up the Panels search page. + +adding-panels + +Next, search for Gradio Panel Extended in the Public Panels section and click `Add`. + +gradio-panel-extended + +Once you have added your Panel, click `Edit` to access to the Panel Options page and paste in the URL of your Gradio application. 
+ +![Edit-Gradio-Panel-Options](https://user-images.githubusercontent.com/7529846/214573001-23814b5a-ca65-4ace-a8a5-b27cdda70f7a.gif) + +Edit-Gradio-Panel-URL + +## 3. Embedding Hugging Face Spaces directly into your Comet Projects + + + +You can also embed Gradio Applications that are hosted on Hugging Faces Spaces into your Comet Projects using the Hugging Face Spaces Panel. + +Go to your Comet Project page, and head over to the Panels tab. Click the `+ Add` button to bring up the Panels search page. Next, search for the Hugging Face Spaces Panel in the Public Panels section and click `Add`. + +huggingface-spaces-panel + +Once you have added your Panel, click Edit to access to the Panel Options page and paste in the path of your Hugging Face Space e.g. `pytorch/ResNet` + +Edit-HF-Space + +## 4. Logging Model Inferences to Comet + + + +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/comet-examples/blob/master/integrations/model-evaluation/gradio/notebooks/Logging_Model_Inferences_with_Comet_and_Gradio.ipynb) + +In the previous examples, we demonstrated the various ways in which you can interact with a Gradio application through the Comet UI. Additionally, you can also log model inferences, such as SHAP plots, from your Gradio application to Comet. + +In the following snippet, we're going to log inferences from a Text Generation model. We can persist an Experiment across multiple inference calls using Gradio's [State](https://www.gradio.app/docs/#state) object. This will allow you to log multiple inferences from a model to a single Experiment. 
+ +```python +import comet_ml +import gradio as gr +import shap +import torch +from transformers import AutoModelForCausalLM, AutoTokenizer + +if torch.cuda.is_available(): + device = "cuda" +else: + device = "cpu" + +MODEL_NAME = "gpt2" + +model = AutoModelForCausalLM.from_pretrained(MODEL_NAME) + +# set model decoder to true +model.config.is_decoder = True +# set text-generation params under task_specific_params +model.config.task_specific_params["text-generation"] = { + "do_sample": True, + "max_length": 50, + "temperature": 0.7, + "top_k": 50, + "no_repeat_ngram_size": 2, +} +model = model.to(device) + +tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME) +explainer = shap.Explainer(model, tokenizer) + + +def start_experiment(): + """Returns an APIExperiment object that is thread safe + and can be used to log inferences to a single Experiment + """ + try: + api = comet_ml.API() + workspace = api.get_default_workspace() + project_name = comet_ml.config.get_config()["comet.project_name"] + + experiment = comet_ml.APIExperiment( + workspace=workspace, project_name=project_name + ) + experiment.log_other("Created from", "gradio-inference") + + message = f"Started Experiment: [{experiment.name}]({experiment.url})" + + return (experiment, message) + + except Exception as e: + return None, None + + +def predict(text, state, message): + experiment = state + + shap_values = explainer([text]) + plot = shap.plots.text(shap_values, display=False) + + if experiment is not None: + experiment.log_other("message", message) + experiment.log_html(plot) + + return plot + + +with gr.Blocks() as demo: + start_experiment_btn = gr.Button("Start New Experiment") + experiment_status = gr.Markdown() + + # Log a message to the Experiment to provide more context + experiment_message = gr.Textbox(label="Experiment Message") + experiment = gr.State() + + input_text = gr.Textbox(label="Input Text", lines=5, interactive=True) + submit_btn = gr.Button("Submit") + + output = 
gr.HTML(interactive=True) + + start_experiment_btn.click( + start_experiment, outputs=[experiment, experiment_status] + ) + submit_btn.click( + predict, inputs=[input_text, experiment, experiment_message], outputs=[output] + ) +``` + +Inferences from this snippet will be saved in the HTML tab of your experiment. + + + +## Conclusion + +We hope you found this guide useful and that it provides some inspiration to help you build awesome model evaluation workflows with Comet and Gradio. + +## How to contribute Gradio demos on HF spaces on the Comet organization + +- Create an account on Hugging Face [here](https://huggingface.co/join). +- Add Gradio Demo under your username, see this [course](https://huggingface.co/course/chapter9/4?fw=pt) for setting up Gradio Demo on Hugging Face. +- Request to join the Comet organization [here](https://huggingface.co/Comet). + +## Additional Resources + +- [Comet Documentation](https://www.comet.com/docs/v2/?utm_source=gradio&utm_medium=referral&utm_campaign=gradio-integration&utm_content=gradio-docs) diff --git a/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/Gradio-and-ONNX-on-Hugging-Face.md b/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/Gradio-and-ONNX-on-Hugging-Face.md new file mode 100644 index 0000000000000000000000000000000000000000..9893285910887e5a890698f319d8d9c96002ce3a --- /dev/null +++ b/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/Gradio-and-ONNX-on-Hugging-Face.md @@ -0,0 +1,140 @@ +# Gradio and ONNX on Hugging Face + +Related spaces: https://huggingface.co/spaces/onnx/EfficientNet-Lite4 +Tags: ONNX, SPACES +Contributed by Gradio and the ONNX team + +## Introduction + +In this Guide, we'll walk you through: + +- Introduction of ONNX, ONNX model zoo, Gradio, and Hugging Face Spaces +- How to setup a Gradio demo for EfficientNet-Lite4 +- How to contribute your own Gradio demos for the ONNX organization on Hugging Face + +Here's an 
[example](https://onnx-efficientnet-lite4.hf.space/) of an ONNX model. + +## What is the ONNX Model Zoo? + +Open Neural Network Exchange ([ONNX](https://onnx.ai/)) is an open standard format for representing machine learning models. ONNX is supported by a community of partners who have implemented it in many frameworks and tools. For example, if you have trained a model in TensorFlow or PyTorch, you can convert it to ONNX easily, and from there run it on a variety of devices using an engine/compiler like ONNX Runtime. + +The [ONNX Model Zoo](https://github.com/onnx/models) is a collection of pre-trained, state-of-the-art models in the ONNX format contributed by community members. Accompanying each model are Jupyter notebooks for model training and running inference with the trained model. The notebooks are written in Python and include links to the training dataset as well as references to the original paper that describes the model architecture. + +## What are Hugging Face Spaces & Gradio? + +### Gradio + +Gradio lets users demo their machine learning models as a web app all in python code. Gradio wraps a python function into a user interface and the demos can be launched inside jupyter notebooks, colab notebooks, as well as embedded in your own website and hosted on Hugging Face Spaces for free. + +Get started [here](https://gradio.app/getting_started) + +### Hugging Face Spaces + +Hugging Face Spaces is a free hosting option for Gradio demos. Spaces comes with 3 SDK options: Gradio, Streamlit and Static HTML demos. Spaces can be public or private and the workflow is similar to github repos. There are over 2000+ spaces currently on Hugging Face. Learn more about spaces [here](https://huggingface.co/spaces/launch). + +### Hugging Face Models + +Hugging Face Model Hub also supports ONNX models and ONNX models can be filtered through the [ONNX tag](https://huggingface.co/models?library=onnx&sort=downloads) + +## How did Hugging Face help the ONNX Model Zoo? 
+ +There are a lot of Jupyter notebooks in the ONNX Model Zoo for users to test models. Previously, users needed to download the models themselves and run those notebooks locally for testing. With Hugging Face, the testing process can be much simpler and more user-friendly. Users can easily try certain ONNX Model Zoo model on Hugging Face Spaces and run a quick demo powered by Gradio with ONNX Runtime, all on cloud without downloading anything locally. Note, there are various runtimes for ONNX, e.g., [ONNX Runtime](https://github.com/microsoft/onnxruntime), [MXNet](https://github.com/apache/incubator-mxnet). + +## What is the role of ONNX Runtime? + +ONNX Runtime is a cross-platform inference and training machine-learning accelerator. It makes live Gradio demos with ONNX Model Zoo model on Hugging Face possible. + +ONNX Runtime inference can enable faster customer experiences and lower costs, supporting models from deep learning frameworks such as PyTorch and TensorFlow/Keras as well as classical machine learning libraries such as scikit-learn, LightGBM, XGBoost, etc. ONNX Runtime is compatible with different hardware, drivers, and operating systems, and provides optimal performance by leveraging hardware accelerators where applicable alongside graph optimizations and transforms. For more information please see the [official website](https://onnxruntime.ai/). + +## Setting up a Gradio Demo for EfficientNet-Lite4 + +EfficientNet-Lite 4 is the largest variant and most accurate of the set of EfficientNet-Lite models. It is an integer-only quantized model that produces the highest accuracy of all of the EfficientNet models. It achieves 80.4% ImageNet top-1 accuracy, while still running in real-time (e.g. 30ms/image) on a Pixel 4 CPU. 
+Here we walk through setting up an example demo for EfficientNet-Lite4 using Gradio
+
+First we import our dependencies and download and load the efficientnet-lite4 model from the onnx model zoo. Then we load the labels from the labels_map.txt file. We then set up our preprocessing functions, load the model for inference, and set up the inference function. Finally, the inference function is wrapped into a gradio interface for a user to interact with. See the full code below.
* out_width / scale) + if height > width: + w = new_width + h = int(new_height * height / width) + else: + h = new_height + w = int(new_width * width / height) + img = cv2.resize(img, (w, h), interpolation=inter_pol) + return img + +# crops the image around the center based on given height and width +def center_crop(img, out_height, out_width): + height, width, _ = img.shape + left = int((width - out_width) / 2) + right = int((width + out_width) / 2) + top = int((height - out_height) / 2) + bottom = int((height + out_height) / 2) + img = img[top:bottom, left:right] + return img + + +sess = ort.InferenceSession(model) + +def inference(img): + img = cv2.imread(img) + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + + img = pre_process_edgetpu(img, (224, 224, 3)) + + img_batch = np.expand_dims(img, axis=0) + + results = sess.run(["Softmax:0"], {"images:0": img_batch})[0] + result = reversed(results[0].argsort()[-5:]) + resultdic = {} + for r in result: + resultdic[labels[str(r)]] = float(results[0][r]) + return resultdic + +title = "EfficientNet-Lite4" +description = "EfficientNet-Lite 4 is the largest variant and most accurate of the set of EfficientNet-Lite model. It is an integer-only quantized model that produces the highest accuracy of all of the EfficientNet models. It achieves 80.4% ImageNet top-1 accuracy, while still running in real-time (e.g. 30ms/image) on a Pixel 4 CPU." +examples = [['catonnx.jpg']] +gr.Interface(inference, gr.Image(type="filepath"), "label", title=title, description=description, examples=examples).launch() +``` + +## How to contribute Gradio demos on HF spaces using ONNX models + +- Add model to the [onnx model zoo](https://github.com/onnx/models/blob/main/.github/PULL_REQUEST_TEMPLATE.md) +- Create an account on Hugging Face [here](https://huggingface.co/join). 
+- See list of models left to add to ONNX organization, please refer to the table with the [Models list](https://github.com/onnx/models#models) +- Add Gradio Demo under your username, see this [blog post](https://huggingface.co/blog/gradio-spaces) for setting up Gradio Demo on Hugging Face. +- Request to join ONNX Organization [here](https://huggingface.co/onnx). +- Once approved transfer model from your username to ONNX organization +- Add a badge for model in model table, see examples in [Models list](https://github.com/onnx/models#models) diff --git a/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/Gradio-and-Wandb-Integration.md b/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/Gradio-and-Wandb-Integration.md new file mode 100644 index 0000000000000000000000000000000000000000..0b75d696354dc52a60ffc66e0ced600c598b44ad --- /dev/null +++ b/testbed/gradio-app__gradio/guides/05_integrating-other-frameworks/Gradio-and-Wandb-Integration.md @@ -0,0 +1,366 @@ +# Gradio and W&B Integration + +Related spaces: https://huggingface.co/spaces/akhaliq/JoJoGAN +Tags: WANDB, SPACES +Contributed by Gradio team + +## Introduction + +In this Guide, we'll walk you through: + +- Introduction of Gradio, and Hugging Face Spaces, and Wandb +- How to setup a Gradio demo using the Wandb integration for JoJoGAN +- How to contribute your own Gradio demos after tracking your experiments on wandb to the Wandb organization on Hugging Face + + +## What is Wandb? + +Weights and Biases (W&B) allows data scientists and machine learning scientists to track their machine learning experiments at every stage, from training to production. Any metric can be aggregated over samples and shown in panels in a customizable and searchable dashboard, like below: + +Screen Shot 2022-08-01 at 5 54 59 PM + +## What are Hugging Face Spaces & Gradio? + +### Gradio + +Gradio lets users demo their machine learning models as a web app, all in a few lines of Python. 
Gradio wraps any Python function (such as a machine learning model's inference function) into a user interface and the demos can be launched inside jupyter notebooks, colab notebooks, as well as embedded in your own website and hosted on Hugging Face Spaces for free. + +Get started [here](https://gradio.app/getting_started) + +### Hugging Face Spaces + +Hugging Face Spaces is a free hosting option for Gradio demos. Spaces comes with 3 SDK options: Gradio, Streamlit and Static HTML demos. Spaces can be public or private and the workflow is similar to github repos. There are over 2000+ spaces currently on Hugging Face. Learn more about spaces [here](https://huggingface.co/spaces/launch). + +## Setting up a Gradio Demo for JoJoGAN + +Now, let's walk you through how to do this on your own. We'll make the assumption that you're new to W&B and Gradio for the purposes of this tutorial. + +Let's get started! + +1. Create a W&B account + + Follow [these quick instructions](https://app.wandb.ai/login) to create your free account if you don’t have one already. It shouldn't take more than a couple minutes. Once you're done (or if you've already got an account), next, we'll run a quick colab. + +2. Open Colab Install Gradio and W&B + + We'll be following along with the colab provided in the JoJoGAN repo with some minor modifications to use Wandb and Gradio more effectively. + + [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/mchong6/JoJoGAN/blob/main/stylize.ipynb) + + Install Gradio and Wandb at the top: + +```sh + +pip install gradio wandb +``` + +3. Finetune StyleGAN and W&B experiment tracking + + This next step will open a W&B dashboard to track your experiments and a gradio panel showing pretrained models to choose from a drop down menu from a Gradio Demo hosted on Huggingface Spaces. 
Here's the code you need for that: + + ```python + + alpha = 1.0 + alpha = 1-alpha + + preserve_color = True + num_iter = 100 + log_interval = 50 + + + samples = [] + column_names = ["Reference (y)", "Style Code(w)", "Real Face Image(x)"] + + wandb.init(project="JoJoGAN") + config = wandb.config + config.num_iter = num_iter + config.preserve_color = preserve_color + wandb.log( + {"Style reference": [wandb.Image(transforms.ToPILImage()(target_im))]}, + step=0) + + # load discriminator for perceptual loss + discriminator = Discriminator(1024, 2).eval().to(device) + ckpt = torch.load('models/stylegan2-ffhq-config-f.pt', map_location=lambda storage, loc: storage) + discriminator.load_state_dict(ckpt["d"], strict=False) + + # reset generator + del generator + generator = deepcopy(original_generator) + + g_optim = optim.Adam(generator.parameters(), lr=2e-3, betas=(0, 0.99)) + + # Which layers to swap for generating a family of plausible real images -> fake image + if preserve_color: + id_swap = [9,11,15,16,17] + else: + id_swap = list(range(7, generator.n_latent)) + + for idx in tqdm(range(num_iter)): + mean_w = generator.get_latent(torch.randn([latents.size(0), latent_dim]).to(device)).unsqueeze(1).repeat(1, generator.n_latent, 1) + in_latent = latents.clone() + in_latent[:, id_swap] = alpha*latents[:, id_swap] + (1-alpha)*mean_w[:, id_swap] + + img = generator(in_latent, input_is_latent=True) + + with torch.no_grad(): + real_feat = discriminator(targets) + fake_feat = discriminator(img) + + loss = sum([F.l1_loss(a, b) for a, b in zip(fake_feat, real_feat)])/len(fake_feat) + + + wandb.log({"loss": loss}, step=idx) + if idx % log_interval == 0: + generator.eval() + my_sample = generator(my_w, input_is_latent=True) + generator.train() + my_sample = transforms.ToPILImage()(utils.make_grid(my_sample, normalize=True, range=(-1, 1))) + wandb.log( + {"Current stylization": [wandb.Image(my_sample)]}, + step=idx) + table_data = [ + 
wandb.Image(transforms.ToPILImage()(target_im)), + wandb.Image(img), + wandb.Image(my_sample), + ] + samples.append(table_data) + + g_optim.zero_grad() + loss.backward() + g_optim.step() + + out_table = wandb.Table(data=samples, columns=column_names) + wandb.log({"Current Samples": out_table}) + ``` + +alpha = 1.0 +alpha = 1-alpha + +preserve_color = True +num_iter = 100 +log_interval = 50 + +samples = [] +column_names = ["Referece (y)", "Style Code(w)", "Real Face Image(x)"] + +wandb.init(project="JoJoGAN") +config = wandb.config +config.num_iter = num_iter +config.preserve_color = preserve_color +wandb.log( +{"Style reference": [wandb.Image(transforms.ToPILImage()(target_im))]}, +step=0) + +# load discriminator for perceptual loss + +discriminator = Discriminator(1024, 2).eval().to(device) +ckpt = torch.load('models/stylegan2-ffhq-config-f.pt', map_location=lambda storage, loc: storage) +discriminator.load_state_dict(ckpt["d"], strict=False) + +# reset generator + +del generator +generator = deepcopy(original_generator) + +g_optim = optim.Adam(generator.parameters(), lr=2e-3, betas=(0, 0.99)) + +# Which layers to swap for generating a family of plausible real images -> fake image + +if preserve_color: +id_swap = [9,11,15,16,17] +else: +id_swap = list(range(7, generator.n_latent)) + +for idx in tqdm(range(num_iter)): +mean_w = generator.get_latent(torch.randn([latents.size(0), latent_dim]).to(device)).unsqueeze(1).repeat(1, generator.n_latent, 1) +in_latent = latents.clone() +in_latent[:, id_swap] = alpha*latents[:, id_swap] + (1-alpha)*mean_w[:, id_swap] + + img = generator(in_latent, input_is_latent=True) + + with torch.no_grad(): + real_feat = discriminator(targets) + fake_feat = discriminator(img) + + loss = sum([F.l1_loss(a, b) for a, b in zip(fake_feat, real_feat)])/len(fake_feat) + + + wandb.log({"loss": loss}, step=idx) + if idx % log_interval == 0: + generator.eval() + my_sample = generator(my_w, input_is_latent=True) + generator.train() + my_sample = 
transforms.ToPILImage()(utils.make_grid(my_sample, normalize=True, range=(-1, 1))) + wandb.log( + {"Current stylization": [wandb.Image(my_sample)]}, + step=idx) + table_data = [ + wandb.Image(transforms.ToPILImage()(target_im)), + wandb.Image(img), + wandb.Image(my_sample), + ] + samples.append(table_data) + + g_optim.zero_grad() + loss.backward() + g_optim.step() + +out_table = wandb.Table(data=samples, columns=column_names) +wandb.log({"Current Samples": out_table}) + +```` + +4. Save, Download, and Load Model + + Here's how to save and download your model. + +```python + +from PIL import Image +import torch +torch.backends.cudnn.benchmark = True +from torchvision import transforms, utils +from util import * +import math +import random +import numpy as np +from torch import nn, autograd, optim +from torch.nn import functional as F +from tqdm import tqdm +import lpips +from model import * +from e4e_projection import projection as e4e_projection + +from copy import deepcopy +import imageio + +import os +import sys +import torchvision.transforms as transforms +from argparse import Namespace +from e4e.models.psp import pSp +from util import * +from huggingface_hub import hf_hub_download +from google.colab import files + +torch.save({"g": generator.state_dict()}, "your-model-name.pt") + +files.download('your-model-name.pt') + +latent_dim = 512 +device="cuda" +model_path_s = hf_hub_download(repo_id="akhaliq/jojogan-stylegan2-ffhq-config-f", filename="stylegan2-ffhq-config-f.pt") +original_generator = Generator(1024, latent_dim, 8, 2).to(device) +ckpt = torch.load(model_path_s, map_location=lambda storage, loc: storage) +original_generator.load_state_dict(ckpt["g_ema"], strict=False) +mean_latent = original_generator.mean_latent(10000) + +generator = deepcopy(original_generator) + +ckpt = torch.load("/content/JoJoGAN/your-model-name.pt", map_location=lambda storage, loc: storage) +generator.load_state_dict(ckpt["g"], strict=False) +generator.eval() + 
+plt.rcParams['figure.dpi'] = 150 + + + +transform = transforms.Compose( + [ + transforms.Resize((1024, 1024)), + transforms.ToTensor(), + transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)), + ] +) + + +def inference(img): + img.save('out.jpg') + aligned_face = align_face('out.jpg') + + my_w = e4e_projection(aligned_face, "out.pt", device).unsqueeze(0) + with torch.no_grad(): + my_sample = generator(my_w, input_is_latent=True) + + + npimage = my_sample[0].cpu().permute(1, 2, 0).detach().numpy() + imageio.imwrite('filename.jpeg', npimage) + return 'filename.jpeg' +```` + +5. Build a Gradio Demo + +```python + +import gradio as gr + +title = "JoJoGAN" +description = "Gradio Demo for JoJoGAN: One Shot Face Stylization. To use it, simply upload your image, or click one of the examples to load them. Read more at the links below." + +demo = gr.Interface( + inference, + gr.Image(type="pil"), + gr.Image(type="file"), + title=title, + description=description +) + +demo.launch(share=True) +``` + +6. Integrate Gradio into your W&B Dashboard + + The last step—integrating your Gradio demo with your W&B dashboard—is just one extra line: + +```python + +demo.integrate(wandb=wandb) +``` + + Once you call integrate, a demo will be created and you can integrate it into your dashboard or report + + Outside of W&B with Web components, using the gradio-app tags allows anyone can embed Gradio demos on HF spaces directly into their blogs, websites, documentation, etc.: + +```html + +``` + +7. (Optional) Embed W&B plots in your Gradio App + + It's also possible to embed W&B plots within Gradio apps. To do so, you can create a W&B Report of your plots and + embed them within your Gradio app within a `gr.HTML` block. + + The Report will need to be public and you will need to wrap the URL within an iFrame like this: + +```python + +import gradio as gr + +def wandb_report(url): + iframe = f' +
+- `primary_hue`: This is the color that draws attention in your theme. In the default theme, this is set to `gradio.themes.colors.orange`.
In the default theme, this is set to `gradio.themes.colors.blue`. +- `neutral_hue`: This is the color that is used for text and other neutral elements in your theme. In the default theme, this is set to `gradio.themes.colors.gray`. + +You could modify these values using their string shortcuts, such as + +```python +with gr.Blocks(theme=gr.themes.Default(primary_hue="red", secondary_hue="pink")) as demo: + ... +``` + +or you could use the `Color` objects directly, like this: + +```python +with gr.Blocks(theme=gr.themes.Default(primary_hue=gr.themes.colors.red, secondary_hue=gr.themes.colors.pink)) as demo: + ... +``` + +
+ +
+ +Predefined colors are: + +- `slate` +- `gray` +- `zinc` +- `neutral` +- `stone` +- `red` +- `orange` +- `amber` +- `yellow` +- `lime` +- `green` +- `emerald` +- `teal` +- `cyan` +- `sky` +- `blue` +- `indigo` +- `violet` +- `purple` +- `fuchsia` +- `pink` +- `rose` + +You could also create your own custom `Color` objects and pass them in. + +### Core Sizing + +The next 3 constructor arguments set the sizing of the theme and are `gradio.themes.Size` objects. Internally, these Size objects hold pixel size values that range from `xxs` to `xxl`. Other CSS variables are derived from these 3 sizes. + +- `spacing_size`: This sets the padding within and spacing between elements. In the default theme, this is set to `gradio.themes.sizes.spacing_md`. +- `radius_size`: This sets the roundedness of corners of elements. In the default theme, this is set to `gradio.themes.sizes.radius_md`. +- `text_size`: This sets the font size of text. In the default theme, this is set to `gradio.themes.sizes.text_md`. + +You could modify these values using their string shortcuts, such as + +```python +with gr.Blocks(theme=gr.themes.Default(spacing_size="sm", radius_size="none")) as demo: + ... +``` + +or you could use the `Size` objects directly, like this: + +```python +with gr.Blocks(theme=gr.themes.Default(spacing_size=gr.themes.sizes.spacing_sm, radius_size=gr.themes.sizes.radius_none)) as demo: + ... +``` + +
+ +
+ +The predefined size objects are: + +- `radius_none` +- `radius_sm` +- `radius_md` +- `radius_lg` +- `spacing_sm` +- `spacing_md` +- `spacing_lg` +- `text_sm` +- `text_md` +- `text_lg` + +You could also create your own custom `Size` objects and pass them in. + +### Core Fonts + +The final 2 constructor arguments set the fonts of the theme. You can pass a list of fonts to each of these arguments to specify fallbacks. If you provide a string, it will be loaded as a system font. If you provide a `gradio.themes.GoogleFont`, the font will be loaded from Google Fonts. + +- `font`: This sets the primary font of the theme. In the default theme, this is set to `gradio.themes.GoogleFont("Source Sans Pro")`. +- `font_mono`: This sets the monospace font of the theme. In the default theme, this is set to `gradio.themes.GoogleFont("IBM Plex Mono")`. + +You could modify these values such as the following: + +```python +with gr.Blocks(theme=gr.themes.Default(font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"])) as demo: + ... +``` + +
+ +
+In the example above, we've set the `loader_color` and `slider_color` variables to `#FF0000`, despite the overall `primary_hue` using the blue color palette. You can set any CSS variable that is defined in the theme in this manner.
This allows us to only have to modify a few variables to change the look and feel of the entire theme, while also getting finer control of individual elements that we may want to modify. + +#### Referencing Core Variables + +To reference one of the core constructor variables, precede the variable name with an asterisk. To reference a core color, use the `*primary_`, `*secondary_`, or `*neutral_` prefix, followed by the brightness value. For example: + +```python +theme = gr.themes.Default(primary_hue="blue").set( + button_primary_background_fill="*primary_200", + button_primary_background_fill_hover="*primary_300", +) +``` + +In the example above, we've set the `button_primary_background_fill` and `button_primary_background_fill_hover` variables to `*primary_200` and `*primary_300`. These variables will be set to the 200 and 300 brightness values of the blue primary color palette, respectively. + +Similarly, to reference a core size, use the `*spacing_`, `*radius_`, or `*text_` prefix, followed by the size value. For example: + +```python +theme = gr.themes.Default(radius_size="md").set( + button_primary_border_radius="*radius_xl", +) +``` + +In the example above, we've set the `button_primary_border_radius` variable to `*radius_xl`. This variable will be set to the `xl` setting of the medium radius size range. + +#### Referencing Other Variables + +Variables can also reference each other. For example, look at the example below: + +```python +theme = gr.themes.Default().set( + button_primary_background_fill="#FF0000", + button_primary_background_fill_hover="#FF0000", + button_primary_border="#FF0000", +) +``` + +Having to set these values to a common color is a bit tedious. Instead, we can reference the `button_primary_background_fill` variable in the `button_primary_background_fill_hover` and `button_primary_border` variables, using a `*` prefix. 
+`button_primary_border_dark` will draw its value from `button_primary_background_fill_dark`, because dark mode always draws from the dark version of the variable.
+ +
+The Base theme is very barebones, and uses `gr.themes.Blue` as its primary color - you'll note the primary button and the loading animation are both blue as a result. Let's change the default core arguments of our app. We'll overwrite the constructor and pass new defaults for the core constructor arguments.
+ +
+ +See how the primary button and the loading animation are now green? These CSS variables are tied to the `primary_hue` variable. + +Let's modify the theme a bit more directly. We'll call the `set()` method to overwrite CSS variable values explicitly. We can use any CSS logic, and reference our core constructor arguments using the `*` prefix. + +$code_theme_new_step_3 + +
+ +
+ +Look how fun our theme looks now! With just a few variable changes, our theme looks completely different. + +You may find it helpful to explore the [source code of the other prebuilt themes](https://github.com/gradio-app/gradio/blob/main/gradio/themes) to see how they modified the base theme. You can also find your browser's Inspector useful to select elements from the UI and see what CSS variables are being used in the styles panel. + +## Sharing Themes + +Once you have created a theme, you can upload it to the HuggingFace Hub to let others view it, use it, and build off of it! + +### Uploading a Theme + +There are two ways to upload a theme, via the theme class instance or the command line. We will cover both of them with the previously created `seafoam` theme. + +- Via the class instance + +Each theme instance has a method called `push_to_hub` we can use to upload a theme to the HuggingFace hub. + +```python +seafoam.push_to_hub(repo_name="seafoam", + version="0.0.1", + hf_token="") +``` + +- Via the command line + +First save the theme to disk + +```python +seafoam.dump(filename="seafoam.json") +``` + +Then use the `upload_theme` command: + +```bash +upload_theme\ +"seafoam.json"\ +"seafoam"\ +--version "0.0.1"\ +--hf_token "" +``` + +In order to upload a theme, you must have a HuggingFace account and pass your [Access Token](https://huggingface.co/docs/huggingface_hub/quick-start#login) +as the `hf_token` argument. However, if you log in via the [HuggingFace command line](https://huggingface.co/docs/huggingface_hub/quick-start#login) (which comes installed with `gradio`), +you can omit the `hf_token` argument. + +The `version` argument lets you specify a valid [semantic version](https://www.geeksforgeeks.org/introduction-semantic-versioning/) string for your theme. +That way your users are able to specify which version of your theme they want to use in their apps. 
This also lets you publish updates to your theme without worrying +about changing how previously created apps look. The `version` argument is optional. If omitted, the next patch version is automatically applied. + +### Theme Previews + +By calling `push_to_hub` or `upload_theme`, the theme assets will be stored in a [HuggingFace space](https://huggingface.co/docs/hub/spaces-overview). + +The theme preview for our seafoam theme is here: [seafoam preview](https://huggingface.co/spaces/gradio/seafoam). + +
+ +
+ +### Discovering Themes + +The [Theme Gallery](https://huggingface.co/spaces/gradio/theme-gallery) shows all the public gradio themes. After publishing your theme, +it will automatically show up in the theme gallery after a couple of minutes. + +You can sort the themes by the number of likes on the space and from most to least recently created as well as toggling themes between light and dark mode. + +
+ +
+ +### Downloading + +To use a theme from the hub, use the `from_hub` method on the `ThemeClass` and pass it to your app: + +```python +my_theme = gr.Theme.from_hub("gradio/seafoam") + +with gr.Blocks(theme=my_theme) as demo: + .... +``` + +You can also pass the theme string directly to `Blocks` or `Interface` (`gr.Blocks(theme="gradio/seafoam")`) + +You can pin your app to an upstream theme version by using semantic versioning expressions. + +For example, the following would ensure the theme we load from the `seafoam` repo was between versions `0.0.1` and `0.1.0`: + +```python +with gr.Blocks(theme="gradio/seafoam@>=0.0.1,<0.1.0") as demo: + .... +``` + +Enjoy creating your own themes! If you make one you're proud of, please share it with the world by uploading it to the hub! +If you tag us on [Twitter](https://twitter.com/gradio) we can give your theme a shout out! + + diff --git a/testbed/gradio-app__gradio/guides/08_other-tutorials/using-flagging.md b/testbed/gradio-app__gradio/guides/08_other-tutorials/using-flagging.md new file mode 100644 index 0000000000000000000000000000000000000000..50f9d62419775ad8f428c98484065447dd718664 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/08_other-tutorials/using-flagging.md @@ -0,0 +1,197 @@ +# Using Flagging + +Related spaces: https://huggingface.co/spaces/gradio/calculator-flagging-crowdsourced, https://huggingface.co/spaces/gradio/calculator-flagging-options, https://huggingface.co/spaces/gradio/calculator-flag-basic +Tags: FLAGGING, DATA + +## Introduction + +When you demo a machine learning model, you might want to collect data from users who try the model, particularly data points in which the model is not behaving as expected. Capturing these "hard" data points is valuable because it allows you to improve your machine learning model and make it more reliable and robust. + +Gradio simplifies the collection of this data by including a **Flag** button with every `Interface`. 
This allows a user or tester to easily send data back to the machine where the demo is running. In this Guide, we discuss more about how to use the flagging feature, both with `gradio.Interface` as well as with `gradio.Blocks`. + +## The **Flag** button in `gradio.Interface` + +Flagging with Gradio's `Interface` is especially easy. By default, underneath the output components, there is a button marked **Flag**. When a user testing your model sees input with interesting output, they can click the flag button to send the input and output data back to the machine where the demo is running. The sample is saved to a CSV log file (by default). If the demo involves images, audio, video, or other types of files, these are saved separately in a parallel directory and the paths to these files are saved in the CSV file. + +There are [four parameters](https://gradio.app/docs/#interface-header) in `gradio.Interface` that control how flagging works. We will go over them in greater detail. + +- `allow_flagging`: this parameter can be set to either `"manual"` (default), `"auto"`, or `"never"`. + - `manual`: users will see a button to flag, and samples are only flagged when the button is clicked. + - `auto`: users will not see a button to flag, but every sample will be flagged automatically. + - `never`: users will not see a button to flag, and no sample will be flagged. +- `flagging_options`: this parameter can be either `None` (default) or a list of strings. + - If `None`, then the user simply clicks on the **Flag** button and no additional options are shown. + - If a list of strings are provided, then the user sees several buttons, corresponding to each of the strings that are provided. For example, if the value of this parameter is `["Incorrect", "Ambiguous"]`, then buttons labeled **Flag as Incorrect** and **Flag as Ambiguous** appear. This only applies if `allow_flagging` is `"manual"`. + - The chosen option is then logged along with the input and output. 
+- `flagging_dir`: this parameter takes a string. + - It represents what to name the directory where flagged data is stored. +- `flagging_callback`: this parameter takes an instance of a subclass of the `FlaggingCallback` class + - Using this parameter allows you to write custom code that gets run when the flag button is clicked + - By default, this is set to an instance of `gr.CSVLogger` + - One example is setting it to an instance of `gr.HuggingFaceDatasetSaver` which can allow you to pipe any flagged data into a HuggingFace Dataset. (See more below.) + +## What happens to flagged data? + +Within the directory provided by the `flagging_dir` argument, a CSV file will log the flagged data. + +Here's an example: The code below creates the calculator interface embedded below it: + +```python +import gradio as gr + + +def calculator(num1, operation, num2): + if operation == "add": + return num1 + num2 + elif operation == "subtract": + return num1 - num2 + elif operation == "multiply": + return num1 * num2 + elif operation == "divide": + return num1 / num2 + + +iface = gr.Interface( + calculator, + ["number", gr.Radio(["add", "subtract", "multiply", "divide"]), "number"], + "number", + allow_flagging="manual" +) + +iface.launch() +``` + + + +When you click the flag button above, the directory where the interface was launched will include a new flagged subfolder, with a csv file inside it. This csv file includes all the data that was flagged. + +```directory ++-- flagged/ +| +-- logs.csv +``` + +_flagged/logs.csv_ + +```csv +num1,operation,num2,Output,timestamp +5,add,7,12,2022-01-31 11:40:51.093412 +6,subtract,1.5,4.5,2022-01-31 03:25:32.023542 +``` + +If the interface involves file data, such as for Image and Audio components, folders will be created to store those flagged data as well. For example an `image` input to `image` output interface will create the following structure. 
+ +```directory ++-- flagged/ +| +-- logs.csv +| +-- im/ +| | +-- 0.png +| | +-- 1.png +| +-- Output/ +| | +-- 0.png +| | +-- 1.png +``` + +_flagged/logs.csv_ + +```csv +im,Output,timestamp +im/0.png,Output/0.png,2022-02-04 19:49:58.026963 +im/1.png,Output/1.png,2022-02-02 10:40:51.093412 +``` + +If you wish for the user to provide a reason for flagging, you can pass a list of strings to the `flagging_options` argument of Interface. Users will have to select one of these choices when flagging, and the option will be saved as an additional column to the CSV. + +If we go back to the calculator example, the following code will create the interface embedded below it. + +```python +iface = gr.Interface( + calculator, + ["number", gr.Radio(["add", "subtract", "multiply", "divide"]), "number"], + "number", + allow_flagging="manual", + flagging_options=["wrong sign", "off by one", "other"] +) + +iface.launch() +``` + + + +When users click the flag button, the csv file will now include a column indicating the selected option. + +_flagged/logs.csv_ + +```csv +num1,operation,num2,Output,flag,timestamp +5,add,7,-12,wrong sign,2022-02-04 11:40:51.093412 +6,subtract,1.5,3.5,off by one,2022-02-04 11:42:32.062512 +``` + +## The HuggingFaceDatasetSaver Callback + +Sometimes, saving the data to a local CSV file doesn't make sense. For example, on Hugging Face +Spaces, developers typically don't have access to the underlying ephemeral machine hosting the Gradio +demo. That's why, by default, flagging is turned off in Hugging Face Space. However, +you may want to do something else with the flagged data. + +We've made this super easy with the `flagging_callback` parameter. + +For example, below we're going to pipe flagged data from our calculator example into a Hugging Face Dataset, e.g. 
so that we can build a "crowd-sourced" dataset: + +```python +import os + +HF_TOKEN = os.getenv('HF_TOKEN') +hf_writer = gr.HuggingFaceDatasetSaver(HF_TOKEN, "crowdsourced-calculator-demo") + +iface = gr.Interface( + calculator, + ["number", gr.Radio(["add", "subtract", "multiply", "divide"]), "number"], + "number", + description="Check out the crowd-sourced dataset at: [https://huggingface.co/datasets/aliabd/crowdsourced-calculator-demo](https://huggingface.co/datasets/aliabd/crowdsourced-calculator-demo)", + allow_flagging="manual", + flagging_options=["wrong sign", "off by one", "other"], + flagging_callback=hf_writer +) + +iface.launch() +``` + +Notice that we define our own +instance of `gradio.HuggingFaceDatasetSaver` using our Hugging Face token and +the name of a dataset we'd like to save samples to. In addition, we also set `allow_flagging="manual"` +because on Hugging Face Spaces, `allow_flagging` is set to `"never"` by default. Here's our demo: + + + +You can now see all the examples flagged above in this [public Hugging Face dataset](https://huggingface.co/datasets/aliabd/crowdsourced-calculator-demo). + +![flagging callback hf](https://github.com/gradio-app/gradio/blob/main/guides/assets/flagging-callback-hf.png?raw=true) + +We created the `gradio.HuggingFaceDatasetSaver` class, but you can pass your own custom class as long as it inherits from `FlaggingCallback` defined in [this file](https://github.com/gradio-app/gradio/blob/master/gradio/flagging.py). If you create a cool callback, contribute it to the repo! + +## Flagging with Blocks + +What if you are using `gradio.Blocks`? On one hand, you have even more flexibility +with Blocks -- you can write whatever Python code you want to run when a button is clicked, +and assign that using the built-in events in Blocks. + +At the same time, you might want to use an existing `FlaggingCallback` to avoid writing extra code. +This requires two steps: + +1. 
You have to run your callback's `.setup()` somewhere in the code prior to the + first time you flag data +2. When the flagging button is clicked, then you trigger the callback's `.flag()` method, + making sure to collect the arguments correctly and disabling the typical preprocessing. + +Here is an example with an image sepia filter Blocks demo that lets you flag +data using the default `CSVLogger`: + +$code_blocks_flag +$demo_blocks_flag + +## Privacy + +Important Note: please make sure your users understand when the data they submit is being saved, and what you plan on doing with it. This is especially important when you use `allow_flagging=auto` (when all of the data submitted through the demo is being flagged) + +### That's all! Happy building :) diff --git a/testbed/gradio-app__gradio/guides/CONTRIBUTING.md b/testbed/gradio-app__gradio/guides/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..d43e06dbbf7cc172ac23b41c98b8a7be0f241349 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/CONTRIBUTING.md @@ -0,0 +1,30 @@ +# Contributing a Guide + +Want to help teach Gradio? Consider contributing a Guide! 🤗 + +Broadly speaking, there are two types of guides: + +- **Use cases**: guides that cover step-by-step how to build a particular type of machine learning demo or app using Gradio. Here's an example: [_Creating a Chatbot_](https://github.com/gradio-app/gradio/blob/master/guides/creating_a_chatbot.md) +- **Feature explanation**: guides that describe in detail a particular feature of Gradio. Here's an example: [_Using Flagging_](https://github.com/gradio-app/gradio/blob/master/guides/using_flagging.md) + +We encourage you to submit either type of Guide! (Looking for ideas? 
We may also have open [issues](https://github.com/gradio-app/gradio/issues?q=is%3Aopen+is%3Aissue+label%3Aguides) where users have asked for guides on particular topics) + +## Guide Structure + +As you can see with the previous examples, Guides are standard markdown documents. They usually: + +- start with an Introduction section describing the topic +- include subheadings to make articles easy to navigate +- include real code snippets that make it easy to follow along and implement the Guide +- include embedded Gradio demos to make them more interactive and provide immediate demonstrations of the topic being discussed. These Gradio demos are hosted on [Hugging Face Spaces](https://huggingface.co/spaces) and are embedded using the standard `<gradio-app>` tag. + +## How to Contribute a Guide + +1. Clone or fork this `gradio` repo +2. Add a new markdown document with a descriptive title to the `/guides` folder +3. Write your Guide in standard markdown! Embed Gradio demos wherever helpful +4. Add a list of `related_spaces` at the top of the markdown document (see the previously linked Guides for how to do this) +5. Add 3 `tags` at the top of the markdown document to help users find your guide (again, see the previously linked Guides for how to do this) +6. Open a PR to have your guide reviewed + +That's it! 
We're looking forward to reading your Guide 🥳 diff --git a/testbed/gradio-app__gradio/guides/assets/dataflow.svg b/testbed/gradio-app__gradio/guides/assets/dataflow.svg new file mode 100644 index 0000000000000000000000000000000000000000..43e7e86f6822b2a0e5559105c275edcb479d4348 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/assets/dataflow.svg @@ -0,0 +1,188 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/guides/assets/sharing.svg b/testbed/gradio-app__gradio/guides/assets/sharing.svg new file mode 100644 index 0000000000000000000000000000000000000000..334e0d011e3d87e37d82368808fe6dad9c41766d --- /dev/null +++ b/testbed/gradio-app__gradio/guides/assets/sharing.svg @@ -0,0 +1,487 @@ + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + lion + lion + HOST + REMOTE USERS + diff --git a/testbed/gradio-app__gradio/guides/cn/01_getting-started/01_quickstart.md b/testbed/gradio-app__gradio/guides/cn/01_getting-started/01_quickstart.md new file mode 100644 index 0000000000000000000000000000000000000000..17c667b24f882433b72a73ab08dc6c5643d58a42 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/01_getting-started/01_quickstart.md @@ -0,0 +1,118 @@ +# 快速开始 + +**先决条件**:Gradio 需要 Python 3.8 或更高版本,就是这样! + +## Gradio 是做什么的? 
+ +与他人分享您的机器学习模型、API 或数据科学流程的*最佳方式之一*是创建一个**交互式应用程序**,让您的用户或同事可以在他们的浏览器中尝试演示。 + +Gradio 允许您**使用 Python 构建演示并共享这些演示**。通常只需几行代码!那么我们开始吧。 + +## Hello, World + +要通过一个简单的“Hello, World”示例运行 Gradio,请遵循以下三个步骤: + +1. 使用 pip 安装 Gradio: + +```bash +pip install gradio +``` + +2. 将下面的代码作为 Python 脚本运行或在 Jupyter Notebook 中运行(或者 [Google Colab](https://colab.research.google.com/drive/18ODkJvyxHutTN0P5APWyGFO_xwNcgHDZ?usp=sharing)): + +$code_hello_world + +我们将导入的名称缩短为 `gr`,以便以后在使用 Gradio 的代码中更容易理解。这是一种广泛采用的约定,您应该遵循,以便与您的代码一起工作的任何人都可以轻松理解。 + +3. 在 Jupyter Notebook 中,该演示将自动显示;如果从脚本运行,则会在浏览器中弹出,网址为 [http://localhost:7860](http://localhost:7860): + +$demo_hello_world + +在本地开发时,如果您想将代码作为 Python 脚本运行,您可以使用 Gradio CLI 以**重载模式**启动应用程序,这将提供无缝和快速的开发。了解有关[自动重载指南](https://gradio.app/developing-faster-with-reload-mode/)中重新加载的更多信息。 + +```bash +gradio app.py +``` + +注意:您也可以运行 `python app.py`,但它不会提供自动重新加载机制。 + +## `Interface` 类 + +您会注意到为了创建演示,我们创建了一个 `gr.Interface`。`Interface` 类可以将任何 Python 函数与用户界面配对。在上面的示例中,我们看到了一个简单的基于文本的函数,但该函数可以是任何内容,从音乐生成器到税款计算器再到预训练的机器学习模型的预测函数。 + +`Interface` 类的核心是使用三个必需参数进行初始化: + +- `fn`:要在其周围包装 UI 的函数 +- `inputs`:用于输入的组件(例如 `"text"`、`"image"` 或 `"audio"`) +- `outputs`:用于输出的组件(例如 `"text"`、`"image"` 或 `"label"`) + +让我们更详细地了解用于提供输入和输出的组件。 + +## 组件属性 (Components Attributes) + +我们在前面的示例中看到了一些简单的 `Textbox` 组件,但是如果您想更改 UI 组件的外观或行为怎么办? + +假设您想自定义输入文本字段 - 例如,您希望它更大并具有文本占位符。如果我们使用实际的 `Textbox` 类而不是使用字符串快捷方式,您可以通过组件属性获得更多的自定义功能。 + +$code_hello_world_2 +$demo_hello_world_2 + +## 多个输入和输出组件 + +假设您有一个更复杂的函数,具有多个输入和输出。在下面的示例中,我们定义了一个接受字符串、布尔值和数字,并返回字符串和数字的函数。请看一下如何传递输入和输出组件的列表。 + +$code_hello_world_3 +$demo_hello_world_3 + +只需将组件包装在列表中。`inputs` 列表中的每个组件对应函数的一个参数,顺序相同。`outputs` 列表中的每个组件对应函数返回的一个值,同样是顺序。 + +## 图像示例 + +Gradio 支持许多类型的组件,例如 `Image`、`DataFrame`、`Video` 或 `Label`。让我们尝试一个图像到图像的函数,以了解这些组件的感觉! 
+ +$code_sepia_filter +$demo_sepia_filter + +使用 `Image` 组件作为输入时,您的函数将接收到一个形状为`(高度,宽度,3)` 的 NumPy 数组,其中最后一个维度表示 RGB 值。我们还将返回一个图像,形式为 NumPy 数组。 + +您还可以使用 `type=` 关键字参数设置组件使用的数据类型。例如,如果您希望函数接受图像文件路径而不是 NumPy 数组,输入 `Image` 组件可以写成: + +```python +gr.Image(type="filepath", shape=...) +``` + +还要注意,我们的输入 `Image` 组件附带有一个编辑按钮🖉,允许裁剪和缩放图像。通过这种方式操作图像可以帮助揭示机器学习模型中的偏见或隐藏的缺陷! + +您可以在[Gradio 文档](https://gradio.app/docs)中阅读有关许多组件以及如何使用它们的更多信息。 + +## Blocks:更灵活和可控 + +Gradio 提供了两个类来构建应用程序: + +1. **Interface**,提供了用于创建演示的高级抽象,我们到目前为止一直在讨论。 + +2. **Blocks**,用于以更灵活的布局和数据流设计 Web 应用程序的低级 API。Blocks 允许您执行诸如特性多个数据流和演示,控制组件在页面上的出现位置,处理复杂的数据流(例如,输出可以作为其他函数的输入),并基于用户交互更新组件的属性 / 可见性等操作 - 仍然全部使用 Python。如果您需要这种可定制性,请尝试使用 `Blocks`! + +## Hello, Blocks + +让我们看一个简单的示例。请注意,此处的 API 与 `Interface` 不同。 + +$code_hello_blocks +$demo_hello_blocks + +需要注意的事项: + +- `Blocks` 可以使用 `with` 子句创建,此子句中创建的任何组件都会自动添加到应用程序中。 +- 组件以按创建顺序垂直放置在应用程序中。(稍后我们将介绍自定义布局!) +- 创建了一个 `Button`,然后在此按钮上添加了一个 `click` 事件监听器。对于这个 API,应该很熟悉!与 `Interface` 类似,`click` 方法接受一个 Python 函数、输入组件和输出组件。 + +## 更复杂的应用 + +下面是一个应用程序,以让您对 `Blocks` 可以实现的更多内容有所了解: + +$code_blocks_flipper +$demo_blocks_flipper + +这里有更多的东西!在[building with blocks](https://gradio.app/building_with_blocks)部分中,我们将介绍如何创建像这样的复杂的 `Blocks` 应用程序。 + +恭喜,您已经熟悉了 Gradio 的基础知识! 🥳 转到我们的[下一个指南](https://gradio.app/key_features)了解更多关于 Gradio 的主要功能。 diff --git a/testbed/gradio-app__gradio/guides/cn/01_getting-started/02_key-features.md b/testbed/gradio-app__gradio/guides/cn/01_getting-started/02_key-features.md new file mode 100644 index 0000000000000000000000000000000000000000..4f2f28d39f83a4e0a38bc8cfade03841123c3d56 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/01_getting-started/02_key-features.md @@ -0,0 +1,266 @@ +# 主要特点 + +让我们来介绍一下 Gradio 最受欢迎的一些功能!这里是 Gradio 的主要特点: + +1. [添加示例输入](#example-inputs) +2. [传递自定义错误消息](#errors) +3. [添加描述内容](#descriptive-content) +4. [设置旗标](#flagging) +5. [预处理和后处理](#preprocessing-and-postprocessing) +6. [样式化演示](#styling) +7. [排队用户](#queuing) +8. 
[迭代输出](#iterative-outputs) +9. [进度条](#progress-bars) +10. [批处理函数](#batch-functions) +11. [在协作笔记本上运行](#colab-notebooks) + +## 示例输入 + +您可以提供用户可以轻松加载到 "Interface" 中的示例数据。这对于演示模型期望的输入类型以及演示数据集和模型一起探索的方式非常有帮助。要加载示例数据,您可以将嵌套列表提供给 Interface 构造函数的 `examples=` 关键字参数。外部列表中的每个子列表表示一个数据样本,子列表中的每个元素表示每个输入组件的输入。有关每个组件的示例数据格式在[Docs](https://gradio.app/docs#components)中有说明。 + +$code_calculator +$demo_calculator + +您可以将大型数据集加载到示例中,通过 Gradio 浏览和与数据集进行交互。示例将自动分页(可以通过 Interface 的 `examples_per_page` 参数进行配置)。 + +继续了解示例,请参阅[更多示例](https://gradio.app/more-on-examples)指南。 + +## 错误 + +您希望向用户传递自定义错误消息。为此,with `gr.Error("custom message")` 来显示错误消息。如果在上面的计算器示例中尝试除以零,将显示自定义错误消息的弹出模态窗口。了解有关错误的更多信息,请参阅[文档](https://gradio.app/docs#error)。 + +## 描述性内容 + +在前面的示例中,您可能已经注意到 Interface 构造函数中的 `title=` 和 `description=` 关键字参数,帮助用户了解您的应用程序。 + +Interface 构造函数中有三个参数用于指定此内容应放置在哪里: + +- `title`:接受文本,并可以将其显示在界面的顶部,也将成为页面标题。 +- `description`:接受文本、Markdown 或 HTML,并将其放置在标题正下方。 +- `article`:也接受文本、Markdown 或 HTML,并将其放置在界面下方。 + +![annotated](/assets/guides/annotated.png) + +如果您使用的是 `Blocks` API,则可以 with `gr.Markdown(...)` 或 `gr.HTML(...)` 组件在任何位置插入文本、Markdown 或 HTML,其中描述性内容位于 `Component` 构造函数内部。 + +另一个有用的关键字参数是 `label=`,它存在于每个 `Component` 中。这修改了每个 `Component` 顶部的标签文本。还可以为诸如 `Textbox` 或 `Radio` 之类的表单元素添加 `info=` 关键字参数,以提供有关其用法的进一步信息。 + +```python +gr.Number(label='年龄', info='以年为单位,必须大于0') +``` + +## 旗标 + +默认情况下,"Interface" 将有一个 "Flag" 按钮。当用户测试您的 `Interface` 时,如果看到有趣的输出,例如错误或意外的模型行为,他们可以将输入标记为您进行查看。在由 `Interface` 构造函数的 `flagging_dir=` 参数提供的目录中,将记录标记的输入到一个 CSV 文件中。如果界面涉及文件数据,例如图像和音频组件,将创建文件夹来存储这些标记的数据。 + +例如,对于上面显示的计算器界面,我们将在下面的旗标目录中存储标记的数据: + +```directory ++-- calculator.py ++-- flagged/ +| +-- logs.csv +``` + +_flagged/logs.csv_ + +```csv +num1,operation,num2,Output +5,add,7,12 +6,subtract,1.5,4.5 +``` + +与早期显示的冷色界面相对应,我们将在下面的旗标目录中存储标记的数据: + +```directory ++-- sepia.py ++-- flagged/ +| +-- logs.csv +| +-- im/ +| | +-- 0.png +| | +-- 1.png +| +-- Output/ +| | +-- 0.png +| | +-- 1.png +``` + +_flagged/logs.csv_ + 
+```csv +im,Output +im/0.png,Output/0.png +im/1.png,Output/1.png +``` + +如果您希望用户提供旗标原因,可以将字符串列表传递给 Interface 的 `flagging_options` 参数。用户在进行旗标时必须选择其中一个字符串,这将作为附加列保存到 CSV 中。 + +## 预处理和后处理 (Preprocessing and Postprocessing) + +![annotated](/assets/img/dataflow.svg) + +如您所见,Gradio 包括可以处理各种不同数据类型的组件,例如图像、音频和视频。大多数组件都可以用作输入或输出。 + +当组件用作输入时,Gradio 自动处理*预处理*,将数据从用户浏览器发送的类型(例如网络摄像头快照的 base64 表示)转换为您的函数可以接受的形式(例如 `numpy` 数组)。 + +同样,当组件用作输出时,Gradio 自动处理*后处理*,将数据从函数返回的形式(例如图像路径列表)转换为可以在用户浏览器中显示的形式(例如以 base64 格式显示图像的 `Gallery`)。 + +您可以使用构建图像组件时的参数控制*预处理*。例如,如果您使用以下参数实例化 `Image` 组件,它将将图像转换为 `PIL` 类型,并将其重塑为`(100, 100)`,而不管提交时的原始大小如何: + +```py +img = gr.Image(shape=(100, 100), type="pil") +``` + +相反,这里我们保留图像的原始大小,但在将其转换为 numpy 数组之前反转颜色: + +```py +img = gr.Image(invert_colors=True, type="numpy") +``` + +后处理要容易得多!Gradio 自动识别返回数据的格式(例如 `Image` 是 `numpy` 数组还是 `str` 文件路径?),并将其后处理为可以由浏览器显示的格式。 + +请查看[文档](https://gradio.app/docs),了解每个组件的所有与预处理相关的参数。 + +## 样式 (Styling) + +Gradio 主题是自定义应用程序外观和感觉的最简单方法。您可以选择多种主题或创建自己的主题。要这样做,请将 `theme=` 参数传递给 `Interface` 构造函数。例如: + +```python +demo = gr.Interface(..., theme=gr.themes.Monochrome()) +``` + +Gradio 带有一组预先构建的主题,您可以从 `gr.themes.*` 加载。您可以扩展这些主题或从头开始创建自己的主题 - 有关更多详细信息,请参阅[主题指南](https://gradio.app/theming-guide)。 + +要增加额外的样式能力,您可以 with `css=` 关键字将任何 CSS 传递给您的应用程序。 +Gradio 应用程序的基类是 `gradio-container`,因此以下是一个更改 Gradio 应用程序背景颜色的示例: + +```python +with `gr.Interface(css=".gradio-container {background-color: red}") as demo: + ... +``` + +## 队列 (Queuing) + +如果您的应用程序预计会有大量流量,请 with `queue()` 方法来控制处理速率。这将排队处理调用,因此一次只处理一定数量的请求。队列使用 Websockets,还可以防止网络超时,因此如果您的函数的推理时间很长(> 1 分钟),应使用队列。 + +with `Interface`: + +```python +demo = gr.Interface(...).queue() +demo.launch() +``` + +with `Blocks`: + +```python +with gr.Blocks() as demo: + #... 
+demo.queue() +demo.launch() +``` + +您可以通过以下方式控制一次处理的请求数量: + +```python +demo.queue(concurrency_count=3) +``` + +查看有关配置其他队列参数的[队列文档](/docs/#queue)。 + +在 Blocks 中指定仅对某些函数进行排队: + +```python +with gr.Blocks() as demo2: + num1 = gr.Number() + num2 = gr.Number() + output = gr.Number() + gr.Button("Add").click( + lambda a, b: a + b, [num1, num2], output) + gr.Button("Multiply").click( + lambda a, b: a * b, [num1, num2], output, queue=True) +demo2.launch() +``` + +## 迭代输出 (Iterative Outputs) + +在某些情况下,您可能需要传输一系列输出而不是一次显示单个输出。例如,您可能有一个图像生成模型,希望显示生成的每个步骤的图像,直到最终图像。或者您可能有一个聊天机器人,它逐字逐句地流式传输响应,而不是一次返回全部响应。 + +在这种情况下,您可以将**生成器**函数提供给 Gradio,而不是常规函数。在 Python 中创建生成器非常简单:函数不应该有一个单独的 `return` 值,而是应该 with `yield` 连续返回一系列值。通常,`yield` 语句放置在某种循环中。下面是一个简单示例,生成器只是简单计数到给定数字: + +```python +def my_generator(x): + for i in range(x): + yield i +``` + +您以与常规函数相同的方式将生成器提供给 Gradio。例如,这是一个(虚拟的)图像生成模型,它在输出图像之前生成数个步骤的噪音: + +$code_fake_diffusion +$demo_fake_diffusion + +请注意,我们在迭代器中添加了 `time.sleep(1)`,以创建步骤之间的人工暂停,以便您可以观察迭代器的步骤(在真实的图像生成模型中,这可能是不必要的)。 + +将生成器提供给 Gradio **需要**在底层 Interface 或 Blocks 中启用队列(请参阅上面的队列部分)。 + +## 进度条 + +Gradio 支持创建自定义进度条,以便您可以自定义和控制向用户显示的进度更新。要启用此功能,只需为方法添加一个默认值为 `gr.Progress` 实例的参数即可。然后,您可以直接调用此实例并传入 0 到 1 之间的浮点数来更新进度级别,或者 with `Progress` 实例的 `tqdm()` 方法来跟踪可迭代对象上的进度,如下所示。必须启用队列以进行进度更新。 + +$code_progress_simple +$demo_progress_simple + +如果您 with `tqdm` 库,并且希望从函数内部的任何 `tqdm.tqdm` 自动报告进度更新,请将默认参数设置为 `gr.Progress(track_tqdm=True)`! 
+ +## 批处理函数 (Batch Functions) + +Gradio 支持传递*批处理*函数。批处理函数只是接受输入列表并返回预测列表的函数。 + +例如,这是一个批处理函数,它接受两个输入列表(一个单词列表和一个整数列表),并返回修剪过的单词列表作为输出: + +```python +import time + +def trim_words(words, lens): + trimmed_words = [] + time.sleep(5) + for w, l in zip(words, lens): + trimmed_words.append(w[:int(l)]) + return [trimmed_words] + for w, l in zip(words, lens): +``` + +使用批处理函数的优点是,如果启用了队列,Gradio 服务器可以自动*批处理*传入的请求并并行处理它们,从而可能加快演示速度。以下是 Gradio 代码的示例(请注意 `batch=True` 和 `max_batch_size=16` - 这两个参数都可以传递给事件触发器或 `Interface` 类) + +with `Interface`: + +```python +demo = gr.Interface(trim_words, ["textbox", "number"], ["output"], + batch=True, max_batch_size=16) +demo.queue() +demo.launch() +``` + +with `Blocks`: + +```python +import gradio as gr + +with gr.Blocks() as demo: + with gr.Row(): + word = gr.Textbox(label="word") + leng = gr.Number(label="leng") + output = gr.Textbox(label="Output") + with gr.Row(): + run = gr.Button() + + event = run.click(trim_words, [word, leng], output, batch=True, max_batch_size=16) + +demo.queue() +demo.launch() +``` + +在上面的示例中,可以并行处理 16 个请求(总推理时间为 5 秒),而不是分别处理每个请求(总推理时间为 80 秒)。许多 Hugging Face 的 `transformers` 和 `diffusers` 模型在 Gradio 的批处理模式下自然工作:这是[使用批处理生成图像的示例演示](https://github.com/gradio-app/gradio/blob/main/demo/diffusers_with_batching/run.py) + +注意:使用 Gradio 的批处理函数 **requires** 在底层 Interface 或 Blocks 中启用队列(请参阅上面的队列部分)。 + +## Gradio 笔记本 (Colab Notebooks) + +Gradio 可以在任何运行 Python 的地方运行,包括本地 Jupyter 笔记本和协作笔记本,如[Google Colab](https://colab.research.google.com/)。对于本地 Jupyter 笔记本和 Google Colab 笔记本,Gradio 在本地服务器上运行,您可以在浏览器中与之交互。(注意:对于 Google Colab,这是通过[服务工作器隧道](https://github.com/tensorflow/tensorboard/blob/master/docs/design/colab_integration.md)实现的,您的浏览器需要启用 cookies。)对于其他远程笔记本,Gradio 也将在服务器上运行,但您需要使用[SSH 隧道](https://coderwall.com/p/ohk6cg/remote-access-to-ipython-notebooks-via-ssh)在本地浏览器中查看应用程序。通常,更简单的选择是使用 Gradio 内置的公共链接,[在下一篇指南中讨论](/sharing-your-app/#sharing-demos)。 diff --git 
a/testbed/gradio-app__gradio/guides/cn/01_getting-started/03_sharing-your-app.md b/testbed/gradio-app__gradio/guides/cn/01_getting-started/03_sharing-your-app.md new file mode 100644 index 0000000000000000000000000000000000000000..3947aba7a711952718f52f2e93746414039ae6ac --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/01_getting-started/03_sharing-your-app.md @@ -0,0 +1,212 @@ +# 分享您的应用 + +如何分享您的 Gradio 应用: + +1. [使用 share 参数分享演示](#sharing-demos) +2. [在 HF Spaces 上托管](#hosting-on-hf-spaces) +3. [嵌入托管的空间](#embedding-hosted-spaces) +4. [使用 Web 组件嵌入](#embedding-with-web-components) +5. [使用 API 页面](#api-page) +6. [在页面上添加身份验证](#authentication) +7. [访问网络请求](#accessing-the-network-request-directly) +8. [在 FastAPI 中挂载](#mounting-within-another-fastapi-app) +9. [安全性](#security-and-file-access) + +## 分享演示 + +通过在 `launch()` 方法中设置 `share=True`,可以轻松公开分享 Gradio 演示。就像这样: + +```python +demo.launch(share=True) +``` + +这将生成一个公开的可分享链接,您可以将其发送给任何人!当您发送此链接时,对方用户可以在其浏览器中尝试模型。因为处理过程发生在您的设备上(只要您的设备保持开启!),您不必担心任何打包依赖项的问题。一个分享链接通常看起来像这样:**XXXXX.gradio.app**。尽管链接是通过 Gradio URL 提供的,但我们只是您本地服务器的代理,并不会存储通过您的应用发送的任何数据。 + +但请记住,这些链接可以被公开访问,这意味着任何人都可以使用您的模型进行预测!因此,请确保不要通过您编写的函数公开任何敏感信息,也不要允许在您的设备上进行任何关键更改。如果您设置 `share=False`(默认值,在 colab 笔记本中除外),则只创建一个本地链接,可以通过[端口转发](https://www.ssh.com/ssh/tunneling/example)与特定用户共享。 + + + +分享链接在 72 小时后过期。 + +## 在 HF Spaces 上托管 + +如果您想在互联网上获得您的 Gradio 演示的永久链接,请使用 Hugging Face Spaces。 [Hugging Face Spaces](http://huggingface.co/spaces/) 提供了免费托管您的机器学习模型的基础设施! + +在您创建了一个免费的 Hugging Face 账户后,有三种方法可以将您的 Gradio 应用部署到 Hugging Face Spaces: + +1. 从终端:在应用目录中运行 `gradio deploy`。CLI 将收集一些基本元数据,然后启动您的应用。要更新您的空间,可以重新运行此命令或启用 Github Actions 选项,在 `git push` 时自动更新 Spaces。 +2. 从浏览器:将包含 Gradio 模型和所有相关文件的文件夹拖放到 [此处](https://huggingface.co/new-space)。 +3. 
将 Spaces 与您的 Git 存储库连接,Spaces 将从那里拉取 Gradio 应用。有关更多信息,请参阅 [此指南如何在 Hugging Face Spaces 上托管](https://huggingface.co/blog/gradio-spaces)。 + + + +## 嵌入托管的空间 + +一旦您将应用托管在 Hugging Face Spaces(或您自己的服务器上),您可能希望将演示嵌入到不同的网站上,例如您的博客或个人作品集。嵌入交互式演示使人们可以在他们的浏览器中尝试您构建的机器学习模型,而无需下载或安装任何内容!最好的部分是,您甚至可以将交互式演示嵌入到静态网站中,例如 GitHub 页面。 + +有两种方法可以嵌入您的 Gradio 演示。您可以在 Hugging Face Space 页面的“嵌入此空间”下拉选项中直接找到这两个选项的快速链接: + +![嵌入此空间下拉选项](/assets/guides/embed_this_space.png) + +### 使用 Web 组件嵌入 + +与 IFrames 相比,Web 组件通常为用户提供更好的体验。Web 组件进行延迟加载,这意味着它们不会减慢您网站的加载时间,并且它们会根据 Gradio 应用的大小自动调整其高度。 + +要使用 Web 组件嵌入: + +1. 通过在您的网站中添加以下脚本来导入 gradio JS 库(在 URL 中替换{GRADIO_VERSION}为您使用的 Gradio 库的版本)。 + + ```html + + <script type="module" + src="https://gradio.s3-us-west-2.amazonaws.com/{GRADIO_VERSION}/gradio.js"> + </script> + ``` + +2. 在您想放置应用的位置添加 + `html +<gradio-app src="https://$your_space_host.hf.space"></gradio-app> + ` + 元素。将 `src=` 属性设置为您的 Space 的嵌入 URL,您可以在“嵌入此空间”按钮中找到。例如: + + ```html + + <gradio-app src="https://abidlabs-pytorch-image-classifier.hf.space"></gradio-app> + ``` + + + +您可以在 Gradio 首页 上查看 Web 组件的示例。 + +您还可以使用传递给 `` 标签的属性来自定义 Web 组件的外观和行为: + +- `src`:如前所述,`src` 属性链接到您想要嵌入的托管 Gradio 演示的 URL +- `space`:一个可选的缩写,如果您的 Gradio 演示托管在 Hugging Face Space 上。接受 `username/space_name` 而不是完整的 URL。示例:`gradio/Echocardiogram-Segmentation`。如果提供了此属性,则不需要提供 `src`。 +- `control_page_title`:一个布尔值,指定是否将 html 标题设置为 Gradio 应用的标题(默认为 `"false"`) +- `initial_height`:加载 Gradio 应用时 Web 组件的初始高度(默认为 `"300px"`)。请注意,最终高度是根据 Gradio 应用的大小设置的。 +- `container`:是否显示边框框架和有关 Space 托管位置的信息(默认为 `"true"`) +- `info`:是否仅显示有关 Space 托管位置的信息在嵌入的应用程序下方(默认为 `"true"`) +- `autoscroll`:在预测完成后是否自动滚动到输出(默认为 `"false"`) +- `eager`:在页面加载时是否立即加载 Gradio 应用(默认为 `"false"`) +- `theme_mode`:是否使用 `dark`,`light` 或默认的 `system` 主题模式(默认为 `"system"`) + +以下是使用这些属性创建一个懒加载且初始高度为 0px 的 Gradio 应用的示例。 + +```html +<gradio-app space="gradio/Echocardiogram-Segmentation" eager="true" +initial_height="0px"></gradio-app> +``` + +_ 注意:Gradio 的 CSS 永远不会影响嵌入页面,但嵌入页面可以影响嵌入的 
Gradio 应用的样式。请确保父页面中的任何 CSS 不是如此通用,以至于它也可能适用于嵌入的 Gradio 应用并导致样式破裂。例如,元素选择器如 `header { ... }` 和 `footer { ... }` 最可能引起问题。_ + +### 使用 IFrames 嵌入 + +如果您无法向网站添加 javascript(例如),则可以改为使用 IFrames 进行嵌入,请添加以下元素: + +```html +<iframe src="https://$your_space_host.hf.space"></iframe> +``` + +同样,您可以在“嵌入此空间”按钮中找到您的 Space 的嵌入 URL 的 `src=` 属性。 + +注意:如果您使用 IFrames,您可能希望添加一个固定的 `height` 属性,并设置 `style="border:0;"` 以去除边框。此外,如果您的应用程序需要诸如访问摄像头或麦克风之类的权限,您还需要使用 `allow` 属性提供它们。 + +## API 页面 + +$demo_hello_world + +如果您点击并打开上面的空间,您会在应用的页脚看到一个“通过 API 使用”链接。 + +![通过 API 使用](/assets/guides/use_via_api.png) + +这是一个文档页面,记录了用户可以使用的 REST API 来查询“Interface”函数。`Blocks` 应用程序也可以生成 API 页面,但必须为每个事件监听器显式命名 API,例如: + +```python +btn.click(add, [num1, num2], output, api_name="addition") +``` + +这将记录自动生成的 API 页面的端点 `/api/addition/`。 + +_注意_:对于启用了[队列功能](https://gradio.app/key-features#queuing)的 Gradio 应用程序,如果用户向您的 API 端点发出 POST 请求,他们可以绕过队列。要禁用此行为,请在 `queue()` 方法中设置 `api_open=False`。 + +## 鉴权 + +您可能希望在您的应用程序前面放置一个鉴权页面,以限制谁可以打开您的应用程序。使用 `launch()` 方法中的 `auth=` 关键字参数,您可以提供一个包含用户名和密码的元组,或者一个可接受的用户名 / 密码元组列表;以下是一个为单个名为“admin”的用户提供基于密码的身份验证的示例: + +```python +demo.launch(auth=("admin", "pass1234")) +``` + +对于更复杂的身份验证处理,您甚至可以传递一个以用户名和密码作为参数的函数,并返回 True 以允许身份验证,否则返回 False。这可用于访问第三方身份验证服务等其他功能。 + +以下是一个接受任何用户名和密码相同的登录的函数示例: + +```python +def same_auth(username, password): + return username == password +demo.launch(auth=same_auth) +``` + +为了使身份验证正常工作,必须在浏览器中启用第三方 Cookie。 +默认情况下,Safari、Chrome 隐私模式不会启用此功能。 + +## 直接访问网络请求 + +当用户向您的应用程序进行预测时,您可能需要底层的网络请求,以获取请求标头(例如用于高级身份验证)、记录客户端的 IP 地址或其他原因。Gradio 支持与 FastAPI 类似的方式:只需添加一个类型提示为 `gr.Request` 的函数参数,Gradio 将将网络请求作为该参数传递进来。以下是一个示例: + +```python +import gradio as gr + +def echo(name, request: gr.Request): + if request: + print("Request headers dictionary:", request.headers) + print("IP address:", request.client.host) + return name + +io = gr.Interface(echo, "textbox", "textbox").launch() +``` + +注意:如果直接调用函数而不是通过 UI(例如在缓存示例时),则 `request` 将为 
`None`。您应该明确处理此情况,以确保您的应用程序不会抛出任何错误。这就是为什么我们有显式检查 `if request`。 + +## 嵌入到另一个 FastAPI 应用程序中 + +在某些情况下,您可能已经有一个现有的 FastAPI 应用程序,并且您想要为 Gradio 演示添加一个路径。 +您可以使用 `gradio.mount_gradio_app()` 来轻松实现此目的。 + +以下是一个完整的示例: + +$code_custom_path + +请注意,此方法还允许您在自定义路径上运行 Gradio 应用程序(例如上面的 `http://localhost:8000/gradio`)。 + +## 安全性和文件访问 + +与他人共享 Gradio 应用程序(通过 Spaces、您自己的服务器或临时共享链接进行托管)将主机机器上的某些文件**暴露**给您的 Gradio 应用程序的用户。 + +特别是,Gradio 应用程序允许用户访问以下三类文件: + +- **与 Gradio 脚本所在目录(或子目录)中的文件相同。** 例如,如果您的 Gradio 脚本的路径是 `/home/usr/scripts/project/app.py`,并且您从 `/home/usr/scripts/project/` 启动它,则共享 Gradio 应用程序的用户将能够访问 `/home/usr/scripts/project/` 中的任何文件。这样做是为了您可以在 Gradio 应用程序中轻松引用这些文件(例如应用程序的“示例”)。 + +- **Gradio 创建的临时文件。** 这些是由 Gradio 作为运行您的预测函数的一部分创建的文件。例如,如果您的预测函数返回一个视频文件,则 Gradio 将该视频保存到临时文件中,然后将临时文件的路径发送到前端。您可以通过设置环境变量 `GRADIO_TEMP_DIR` 为绝对路径(例如 `/home/usr/scripts/project/temp/`)来自定义 Gradio 创建的临时文件的位置。 + +- **通过 `launch()` 中的 `allowed_paths` 参数允许的文件。** 此参数允许您传递一个包含其他目录或确切文件路径的列表,以允许用户访问它们。(默认情况下,此参数为空列表)。 + +Gradio**不允许**访问以下内容: + +- **点文件**(其名称以 '.' 开头的任何文件)或其名称以 '.' 
开头的任何目录中的任何文件。 + +- **通过 `launch()` 中的 `blocked_paths` 参数屏蔽的文件。** 您可以将其他目录或确切文件路径的列表传递给 `launch()` 中的 `blocked_paths` 参数。此参数优先于 Gradio 默认或 `allowed_paths` 允许的文件。 + +- **主机机器上的任何其他路径**。用户不应能够访问主机上的其他任意路径。 + +请确保您正在运行最新版本的 `gradio`,以使这些安全设置生效。 diff --git a/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/01_interface-state.md b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/01_interface-state.md new file mode 100644 index 0000000000000000000000000000000000000000..855e34b1a54954f6597c97f130738ed9789d0199 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/01_interface-state.md @@ -0,0 +1,28 @@ +# 接口状态 (Interface State) + +本指南介绍了 Gradio 中如何处理状态。了解全局状态和会话状态的区别,以及如何同时使用它们。 + +## 全局状态 (Global State) + +您的函数可能使用超出单个函数调用的持久性数据。如果数据是所有函数调用和所有用户都可访问的内容,您可以在函数调用外部创建一个变量,并在函数内部访问它。例如,您可能会在函数外部加载一个大模型,并在函数内部使用它,以便每个函数调用都不需要重新加载模型。 + +$code_score_tracker + +在上面的代码中,'scores' 数组在所有用户之间共享。如果多个用户访问此演示,他们的得分将全部添加到同一列表中,并且返回的前 3 个得分将从此共享引用中收集。 + +## 会话状态 (Session State) + +Gradio 支持的另一种数据持久性是会话状态,其中数据在页面会话中的多个提交之间持久存在。但是,不同用户之间的数据*不*共享。要将数据存储在会话状态中,需要执行以下三个步骤: + +1. 将额外的参数传递给您的函数,表示接口的状态。 +2. 在函数的末尾,作为额外的返回值返回状态的更新值。 +3. 
在创建界面时添加 `'state'` 输入和 `'state'` 输出组件。 + +聊天机器人就是需要会话状态的一个例子 - 您希望访问用户之前的提交,但不能将聊天记录存储在全局变量中,因为这样聊天记录会在不同用户之间混乱。 + +$code_chatbot_dialogpt +$demo_chatbot_dialogpt + +请注意,在每个页面中,状态在提交之间保持不变,但是如果在另一个标签中加载此演示(或刷新页面),演示将不共享聊天记录。 + +`state` 的默认值为 None。如果您将默认值传递给函数的状态参数,则该默认值将用作状态的默认值。`Interface` 类仅支持单个输入和输出状态变量,但可以是具有多个元素的列表。对于更复杂的用例,您可以使用 Blocks,[它支持多个 `State` 变量](/state_in_blocks/)。 diff --git a/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/02_reactive-interfaces.md b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/02_reactive-interfaces.md new file mode 100644 index 0000000000000000000000000000000000000000..0510a00df276675ac270a24d8a3dcf53cfd3ec97 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/02_reactive-interfaces.md @@ -0,0 +1,22 @@ +# 反应式界面 (Reactive Interfaces) + +本指南介绍了如何使 Gradio 界面自动刷新或连续流式传输数据。 + +## 实时界面 (Live Interfaces) + +您可以通过在界面中设置 `live=True` 来使界面自动刷新。现在,只要用户输入发生变化,界面就会重新计算。 + +$code_calculator_live +$demo_calculator_live + +注意,因为界面在更改时会自动重新提交,所以没有提交按钮。 + +## 流式组件 (Streaming Components) + +某些组件具有“流式”模式,比如麦克风模式下的 `Audio` 组件或网络摄像头模式下的 `Image` 组件。流式传输意味着数据会持续发送到后端,并且 `Interface` 函数会持续重新运行。 + +当在 `gr.Interface(live=True)` 中同时使用 `gr.Audio(source='microphone')` 和 `gr.Audio(source='microphone', streaming=True)` 时,两者的区别在于第一个 `Component` 会在用户停止录制时自动提交数据并运行 `Interface` 函数,而第二个 `Component` 会在录制过程中持续发送数据并运行 `Interface` 函数。 + +以下是从网络摄像头实时流式传输图像的示例代码。 + +$code_stream_frames diff --git a/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/03_more-on-examples.md b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/03_more-on-examples.md new file mode 100644 index 0000000000000000000000000000000000000000..9d1407be35219e95fce64b47ab2035459998653c --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/03_more-on-examples.md @@ -0,0 +1,41 @@ +# 更多示例 (More on Examples) + +本指南介绍了有关示例的更多内容:从目录中加载示例,提供部分示例和缓存。如果你对示例还不熟悉,请查看 [关键特性](../key-features/#example-inputs) 指南中的介绍。 + +## 
提供示例 (Providing Examples) + +正如 [关键特性](../key-features/#example-inputs) 指南中所介绍的,向接口添加示例就像提供一个列表的列表给 `examples` 关键字参数一样简单。 +每个子列表都是一个数据样本,其中每个元素对应于预测函数的一个输入。 +输入必须按照与预测函数期望的顺序排序。 + +如果你的接口只有一个输入组件,那么可以将示例提供为常规列表,而不是列表的列表。 + +### 从目录加载示例 (Loading Examples from a Directory) + +你还可以指定一个包含示例的目录路径。如果你的接口只接受单个文件类型的输入(例如图像分类器),你只需将目录文件路径传递给 `examples=` 参数,`Interface` 将加载目录中的图像作为示例。 +对于多个输入,该目录必须包含一个带有示例值的 log.csv 文件。 +在计算器演示的上下文中,我们可以设置 `examples='/demo/calculator/examples'` ,在该目录中包含以下 `log.csv` 文件: + +```csv +num,operation,num2 +5,"add",3 +4,"divide",2 +5,"multiply",3 +``` + +当浏览标记数据时,这将非常有用。只需指向标记目录,`Interface` 将从标记数据加载示例。 + +### 提供部分示例 + +有时你的应用程序有许多输入组件,但你只想为其中的一部分提供示例。为了在示例中排除某些输入,对于那些特定输入对应的所有数据样本都传递 `None`。 + +## 示例缓存 (Caching examples) + +你可能希望为用户提供一些模型的缓存示例,以便他们可以快速尝试,以防您的模型运行时间较长。 +如果 `cache_examples=True` ,当你调用 `launch()` 方法时,`Interface` 将运行所有示例,并保存输出。这些数据将保存在一个名为 `gradio_cached_examples` 的目录中。 + +每当用户点击示例时,输出将自动填充到应用程序中,使用来自该缓存目录的数据,而不是实际运行函数。这对于用户可以快速尝试您的模型而不增加任何负载是非常有用的! 
+ +请记住一旦生成了缓存,它将不会在以后的启动中更新。如果示例或函数逻辑发生更改,请删除缓存文件夹以清除缓存并使用另一个 `launch()` 重新构建它。 diff --git a/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/04_advanced-interface-features.md b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/04_advanced-interface-features.md new file mode 100644 index 0000000000000000000000000000000000000000..d4bdacf5360a58716be503188cabcde29191678c --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/04_advanced-interface-features.md @@ -0,0 +1,97 @@ +# 高级接口特性 + +在[接口 Interface](https://gradio.app/docs#interface)类上还有更多内容需要介绍。本指南涵盖了所有高级特性:使用[解释器 Interpretation](https://gradio.app/docs#interpretation),自定义样式,从[Hugging Face Hub](https://hf.co)加载模型,以及使用[并行 Parallel](https://gradio.app/docs#parallel)和[串行 Series](https://gradio.app/docs#series)。 + +## 解释您的预测 + +大多数模型都是黑盒模型,函数的内部逻辑对最终用户来说是隐藏的。为了鼓励透明度,我们通过在 `Interface` 类中简单地将 `interpretation` 关键字设置为 `default`,使得为模型添加解释非常容易。这样,您的用户就可以了解到哪些输入部分对输出结果负责。请看下面的简单界面示例,它展示了一个图像分类器,还包括解释功能: + +$code_image_classifier_interpretation + +除了 `default`,Gradio 还包括了基于[Shapley-based interpretation](https://christophm.github.io/interpretable-ml-book/shap.html),它提供了更准确的解释,尽管运行时间通常较慢。要使用它,只需将 `interpretation` 参数设置为 `"shap"`(注意:还要确保安装了 Python 包 `shap`)。您还可以选择修改 `num_shap` 参数,该参数控制准确性和运行时间之间的权衡(增加此值通常会增加准确性)。下面是一个示例: + +```python +gr.Interface(fn=classify_image, inputs=image, outputs=label, interpretation="shap", num_shap=5).launch() +``` + +这适用于任何函数,即使在内部,模型是复杂的神经网络或其他黑盒模型。如果使用 Gradio 的 `default` 或 `shap` 解释,输出组件必须是 `Label`。支持所有常见的输入组件。下面是一个包含文本输入的示例。 + +$code_gender_sentence_default_interpretation + +那么在幕后发生了什么?使用这些解释方法,Gradio 会使用修改后的输入的多个版本进行多次预测。根据结果,您将看到界面自动将增加类别可能性的文本部分(或图像等)以红色突出显示。颜色的强度对应于输入部分的重要性。减少类别置信度的部分以蓝色突出显示。 + +您还可以编写自己的解释函数。下面的演示在前一个演示中添加了自定义解释。此函数将使用与主封装函数相同的输入。该解释函数的输出将用于突出显示每个输入组件的输入-因此函数必须返回一个列表,其中元素的数量与输入组件的数量相对应。要查看每个输入组件的解释格式,请查阅文档。 + +$code_gender_sentence_custom_interpretation + +在[文档](https://gradio.app/docs#interpretation)中了解更多关于解释的信息。 + +## 自定义样式 + 
+如果您希望对演示的任何方面都有更精细的控制,还可以编写自己的 CSS 或通过 `Interface` 类的 `css` 参数传递 CSS 文件的文件路径。 + +```python +gr.Interface(..., css="body {background-color: red}") +``` + +如果您希望在 CSS 中引用外部文件,请在文件路径(可以是相对路径或绝对路径)之前加上 `"file="`,例如: + +```python +gr.Interface(..., css="body {background-image: url('file=clouds.jpg')}") +``` + +**警告**:不能保证自定义 CSS 能够在 Gradio 的不同版本之间正常工作,因为 Gradio 的 HTML DOM 可能会发生更改。我们建议尽量少使用自定义 CSS,而尽可能使用[主题 Themes](/theming-guide/)。 + +## 加载 Hugging Face 模型和 Spaces + +Gradio 与[Hugging Face Hub](https://hf.co)完美集成,只需一行代码即可加载模型和 Spaces。要使用它,只需在 `Interface` 类中使用 `load()` 方法。所以: + +- 要从 Hugging Face Hub 加载任何模型并围绕它创建一个界面,您需要传递 `"model/"` 或 `"huggingface/"`,后面跟着模型名称,就像这些示例一样: + +```python +gr.Interface.load("huggingface/gpt2").launch(); +``` + +```python +gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", + inputs=gr.Textbox(lines=5, label="Input Text") # customizes the input component +).launch() +``` + +- 要从 Hugging Face Hub 加载任何 Space 并在本地重新创建它(这样您可以自定义输入和输出),您需要传递 `"spaces/"`,后面跟着模型名称: + +```python +gr.Interface.load("spaces/eugenesiow/remove-bg", inputs="webcam", title="Remove your webcam background!").launch() +``` + +使用 Gradio 使用加载 Hugging Face 模型或 spaces 的一个很棒的功能是,您可以立即像 Python 代码中的函数一样使用生成的 `Interface` 对象(这适用于每种类型的模型 / 空间:文本,图像,音频,视频,甚至是多模态模型): + +```python +io = gr.Interface.load("models/EleutherAI/gpt-neo-2.7B") +io("It was the best of times") # outputs model completion +``` + +## 并行和串行放置接口 + +Gradio 还可以使用 `gradio.Parallel` 和 `gradio.Series` 类非常容易地混合接口。`Parallel` 允许您将两个相似的模型(如果它们具有相同的输入类型)并行放置以比较模型预测: + +```python +generator1 = gr.Interface.load("huggingface/gpt2") +generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B") +generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B") + +gr.Parallel(generator1, generator2, generator3).launch() +``` + +`Series` 允许您将模型和 spaces 串行放置,将一个模型的输出传输到下一个模型的输入。 + +```python +generator = gr.Interface.load("huggingface/gpt2") +translator = gr.Interface.load("huggingface/t5-small") + +gr.Series(generator, 
translator).launch() # this demo generates text, then translates it to German, and outputs the final result. +``` + +当然,您还可以在适当的情况下同时使用 `Parallel` 和 `Series`! + +在[文档](https://gradio.app/docs#parallel)中了解有关并行和串行 (`Parallel` 和 `Series`) 的更多信息。 diff --git a/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/05_four-kinds-of-interfaces.md b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/05_four-kinds-of-interfaces.md new file mode 100644 index 0000000000000000000000000000000000000000..150d5c4ade67039eed9321648cd1f09858764924 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/02_building-interfaces/05_four-kinds-of-interfaces.md @@ -0,0 +1,44 @@ +# Gradio 界面的 4 种类型 + +到目前为止,我们一直假设构建 Gradio 演示需要同时具备输入和输出。但对于机器学习演示来说,并不总是如此:例如,*无条件图像生成模型*不需要任何输入,但会生成一张图像作为输出。 + +事实证明,`gradio.Interface` 类实际上可以处理 4 种不同类型的演示: + +1. **Standard demos 标准演示**:同时具有独立的输入和输出(例如图像分类器或语音转文本模型) +2. **Output-only demos 仅输出演示**:不接受任何输入,但会产生输出(例如无条件图像生成模型) +3. **Input-only demos 仅输入演示**:不产生任何输出,但会接受某种形式的输入(例如保存您上传到外部持久数据库的图像的演示) +4. 
**Unified demos 统一演示**:同时具有输入和输出组件,但这些组件是*相同的*。这意味着生成的输出将覆盖输入(例如文本自动完成模型) + +根据演示类型的不同,用户界面(UI)会有略微不同的外观: + +![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/interfaces4.png) + +我们来看一下如何使用 `Interface` 类构建每种类型的演示,以及示例: + +## 标准演示 (Standard demos) + +要创建具有输入和输出组件的演示,只需在 `Interface()` 中设置 `inputs` 和 `outputs` 参数的值。以下是一个简单图像滤镜的示例演示: + +$code_sepia_filter +$demo_sepia_filter + +## 仅输出演示 (Output-only demos) + +那么仅包含输出的演示呢?为了构建这样的演示,只需将 `Interface()` 中的 `inputs` 参数值设置为 `None`。以下是模拟图像生成模型的示例演示: + +$code_fake_gan_no_input +$demo_fake_gan_no_input + +## 仅输入演示 (Input-only demos) + +同样地,要创建仅包含输入的演示,将 `Interface()` 中的 `outputs` 参数值设置为 `None`。以下是将任何上传的图像保存到磁盘的示例演示: + +$code_save_file_no_output +$demo_save_file_no_output + +## 统一演示 (Unified demos) + +这种演示将单个组件同时作为输入和输出。只需将 `Interface()` 中的 `inputs` 和 `outputs` 参数值设置为相同的组件即可创建此演示。以下是文本生成模型的示例演示: + +$code_unified_demo_text_generation +$demo_unified_demo_text_generation diff --git a/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/01_blocks-and-event-listeners.md b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/01_blocks-and-event-listeners.md new file mode 100644 index 0000000000000000000000000000000000000000..7e60ffb91f07dbdf53e53020eacf4611d2ed3a70 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/01_blocks-and-event-listeners.md @@ -0,0 +1,157 @@ +# 区块和事件监听器 (Blocks and Event Listeners) + +我们在[快速入门](https://gradio.app/quickstart/#blocks-more-flexibility-and-control)中简要介绍了区块。让我们深入探讨一下。本指南将涵盖区块的结构、事件监听器及其类型、连续运行事件、更新配置以及使用字典与列表。 + +## 区块结构 (Blocks Structure) + +请查看下面的演示。 + +$code_hello_blocks +$demo_hello_blocks + +- 首先,注意 `with gr.Blocks() as demo:` 子句。区块应用程序代码将被包含在该子句中。 +- 接下来是组件。这些组件是在 `Interface` 中使用的相同组件。但是,与将组件传递给某个构造函数不同,组件在 `with` 子句内创建时会自动添加到区块中。 +- 最后,`click()` 事件监听器。事件监听器定义了应用程序内的数据流。在上面的示例中,监听器将两个文本框相互关联。文本框 `name` 作为输入,文本框 `output` 作为 `greet` 方法的输出。当单击按钮 `greet_btn` 时触发此数据流。与界面类似,事件监听器可以具有多个输入或输出。 + +## 事件监听器与交互性 
(Event Listeners and Interactivity) + +在上面的示例中,您会注意到可以编辑文本框 `name`,但无法编辑文本框 `output`。这是因为作为事件监听器的任何组件都具有交互性。然而,由于文本框 `output` 仅作为输出,它没有交互性。您可以使用 `interactive=` 关键字参数直接配置组件的交互性。 + +```python +output = gr.Textbox(label="输出", interactive=True) +``` + +## 事件监听器的类型 (Types of Event Listeners) + +请查看下面的演示: + +$code_blocks_hello +$demo_blocks_hello + +`welcome` 函数不是由点击触发的,而是由在文本框 `inp` 中输入文字触发的。这是由于 `change()` 事件监听器。不同的组件支持不同的事件监听器。例如,`Video` 组件支持一个 `play()` 事件监听器,当用户按下播放按钮时触发。有关每个组件的事件监听器,请参见[文档](http://gradio.app/docs#components)。 + +## 多个数据流 (Multiple Data Flows) + +区块应用程序不像界面那样限制于单个数据流。请查看下面的演示: + +$code_reversible_flow +$demo_reversible_flow + +请注意,`num1` 可以充当 `num2` 的输入,反之亦然!随着应用程序变得更加复杂,您将能够连接各种组件的多个数据流。 + +下面是一个 " 多步骤 " 示例,其中一个模型的输出(语音到文本模型)被传递给下一个模型(情感分类器)。 + +$code_blocks_speech_text_sentiment +$demo_blocks_speech_text_sentiment + +## 函数输入列表与字典 (Function Input List vs Dict) + +到目前为止,您看到的事件监听器都只有一个输入组件。如果您希望有多个输入组件将数据传递给函数,有两种选项可供函数接受输入组件值: + +1. 作为参数列表,或 +2. 作为以组件为键的单个值字典 + +让我们分别看一个例子: +$code_calculator_list_and_dict + +`add()` 和 `sub()` 都将 `a` 和 `b` 作为输入。然而,这些监听器之间的语法不同。 + +1. 对于 `add_btn` 监听器,我们将输入作为列表传递。函数 `add()` 将每个输入作为参数。`a` 的值映射到参数 `num1`,`b` 的值映射到参数 `num2`。 +2. 对于 `sub_btn` 监听器,我们将输入作为集合传递(注意花括号!)。函数 `sub()` 接受一个名为 `data` 的单个字典参数,其中键是输入组件,值是这些组件的值。 + +使用哪种语法是个人偏好!对于具有许多输入组件的函数,选项 2 可能更容易管理。 + +$demo_calculator_list_and_dict + +## 函数返回列表与字典 (Function Return List vs Dict) + +类似地,您可以返回多个输出组件的值,可以是: + +1. 值列表,或 +2. 
以组件为键的字典 + +首先让我们看一个(1)的示例,其中我们通过返回两个值来设置两个输出组件的值: + +```python +with gr.Blocks() as demo: + food_box = gr.Number(value=10, label="Food Count") + status_box = gr.Textbox() + def eat(food): + if food > 0: + return food - 1, "full" + else: + return 0, "hungry" + gr.Button("EAT").click( + fn=eat, + inputs=food_box, + outputs=[food_box, status_box] + ) +``` + +上面的每个返回语句分别返回与 `food_box` 和 `status_box` 相对应的两个值。 + +除了返回与每个输出组件顺序相对应的值列表外,您还可以返回一个字典,其中键对应于输出组件,值作为新值。这还允许您跳过更新某些输出组件。 + +```python +with gr.Blocks() as demo: + food_box = gr.Number(value=10, label="Food Count") + status_box = gr.Textbox() + def eat(food): + if food > 0: + return {food_box: food - 1, status_box: "full"} + else: + return {status_box: "hungry"} + gr.Button("EAT").click( + fn=eat, + inputs=food_box, + outputs=[food_box, status_box] + ) +``` + +注意,在没有食物的情况下,我们只更新 `status_box` 元素。我们跳过更新 `food_box` 组件。 + +字典返回在事件监听器影响多个组件的返回值或有条件地影响输出时非常有用。 + +请记住,对于字典返回,我们仍然需要在事件监听器中指定可能的输出组件。 + +## 更新组件配置 (Updating Component Configurations) + +事件监听器函数的返回值通常是相应输出组件的更新值。有时我们还希望更新组件的配置,例如可见性。在这种情况下,我们返回一个 `gr.update()` 对象,而不仅仅是更新组件的值。 + +$code_blocks_essay_simple +$demo_blocks_essay_simple + +请注意,我们可以通过 `gr.update()` 方法自我配置文本框。`value=` 参数仍然可以用于更新值以及组件配置。 + +## 连续运行事件 (Running Events Consecutively) + +你也可以使用事件监听器的 `then` 方法按顺序运行事件。在前一个事件运行完成后,这将运行下一个事件。这对于多步更新组件的事件非常有用。 + +例如,在下面的聊天机器人示例中,我们首先立即使用用户消息更新聊天机器人,然后在模拟延迟后使用计算机回复更新聊天机器人。 + +$code_chatbot_simple +$demo_chatbot_simple + +事件监听器的 `.then()` 方法会执行后续事件,无论前一个事件是否引发任何错误。如果只想在前一个事件成功执行后才运行后续事件,请使用 `.success()` 方法,该方法与 `.then()` 接受相同的参数。 + +## 连续运行事件 (Running Events Continuously) + +您可以使用事件监听器的 `every` 参数按固定计划运行事件。这将在客户端连接打开的情况下,每隔一定秒数运行一次事件。如果连接关闭,事件将在下一次迭代后停止运行。 +请注意,这不考虑事件本身的运行时间。因此,使用 `every=5` 运行时间为 1 秒的函数实际上每 6 秒运行一次。 + +以下是每秒更新的正弦曲线示例! 
+ +$code_sine_curve +$demo_sine_curve + +## 收集事件数据 (Gathering Event Data) + +您可以通过将相关的事件数据类作为类型提示添加到事件监听器函数的参数中,收集有关事件的特定数据。 + +例如,使用 `gradio.SelectData` 参数可以为 `.select()` 的事件数据添加类型提示。当用户选择触发组件的一部分时,将触发此事件,并且事件数据包含有关用户的具体选择的信息。如果用户在 `Textbox` 中选择了特定单词,在 `Gallery` 中选择了特定图像或在 `DataFrame` 中选择了特定单元格,则事件数据参数将包含有关具体选择的信息。 + +在下面的双人井字游戏演示中,用户可以选择 `DataFrame` 中的一个单元格进行移动。事件数据参数包含有关所选单元格的信息。我们可以首先检查单元格是否为空,然后用用户的移动更新单元格。 + +$code_tictactoe + +$demo_tictactoe diff --git a/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/02_controlling-layout.md b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/02_controlling-layout.md new file mode 100644 index 0000000000000000000000000000000000000000..8b5df97bf0738751b724ec09c3a339125fe8cc3e --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/02_controlling-layout.md @@ -0,0 +1,95 @@ +# 控制布局 (Controlling Layout) + +默认情况下,块中的组件是垂直排列的。让我们看看如何重新排列组件。在幕后,这种布局结构使用了[Web 开发的 flexbox 模型](https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Flexible_Box_Layout/Basic_Concepts_of_Flexbox)。 + +## Row 行 + +`with gr.Row` 下的元素将水平显示。例如,要并排显示两个按钮: + +```python +with gr.Blocks() as demo: + with gr.Row(): + btn1 = gr.Button("按钮1") + btn2 = gr.Button("按钮2") +``` + +要使行中的每个元素具有相同的高度,请使用 `style` 方法的 `equal_height` 参数。 + +```python +with gr.Blocks() as demo: + with gr.Row(equal_height=True): + textbox = gr.Textbox() + btn2 = gr.Button("按钮2") +``` + +可以通过每个组件中存在的 `scale` 和 `min_width` 参数来控制行中元素的宽度。 + +- `scale` 是一个整数,定义了元素在行中的占用空间。如果将 scale 设置为 `0`,则元素不会扩展占用空间。如果将 scale 设置为 `1` 或更大,则元素将扩展。行中的多个元素将按比例扩展。在下面的示例中,`btn1` 将比 `btn2` 扩展两倍,而 `btn0` 将根本不会扩展: + +```python +with gr.Blocks() as demo: + with gr.Row(): + btn0 = gr.Button("按钮0", scale=0) + btn1 = gr.Button("按钮1", scale=1) + btn2 = gr.Button("按钮2", scale=2) +``` + +- `min_width` 将设置元素的最小宽度。如果没有足够的空间满足所有的 `min_width` 值,行将换行。 + +在[文档](https://gradio.app/docs/#row)中了解有关行的更多信息。 + +## 列和嵌套 (Columns and Nesting) + 
+列中的组件将垂直放置在一起。由于默认布局对于块应用程序来说是垂直布局,因此为了有用,列通常嵌套在行中。例如: + +$code_rows_and_columns +$demo_rows_and_columns + +查看第一列如何垂直排列两个文本框。第二列垂直排列图像和按钮。注意两列的相对宽度由 `scale` 参数设置。具有两倍 `scale` 值的列占据两倍的宽度。 + +在[文档](https://gradio.app/docs/#column)中了解有关列的更多信息。 + +## 选项卡和手风琴 (Tabs and Accordions) + +您还可以使用 `with gr.Tab('tab_name'):` 语句创建选项卡。在 `with gr.Tab('tab_name'):` 上下文中创建的任何组件都将显示在该选项卡中。连续的 Tab 子句被分组在一起,以便一次只能选择一个选项卡,并且只显示该选项卡上下文中的组件。 + +例如: + +$code_blocks_flipper +$demo_blocks_flipper + +还请注意本示例中的 `gr.Accordion('label')`。手风琴是一种可以切换打开或关闭的布局。与 `Tabs` 一样,它是可以选择性隐藏或显示内容的布局元素。在 `with gr.Accordion('label'):` 内定义的任何组件在单击手风琴的切换图标时都会被隐藏或显示。 + +在文档中了解有关[Tabs](https://gradio.app/docs/#tab)和[Accordions](https://gradio.app/docs/#accordion)的更多信息。 + +## 可见性 (Visibility) + +组件和布局元素都有一个 `visible` 参数,可以在初始时设置,并使用 `gr.update()` 进行更新。在 Column 上设置 `gr.update(visible=...)` 可用于显示或隐藏一组组件。 + +$code_blocks_form +$demo_blocks_form + +## 可变数量的输出 (Variable Number of Outputs) + +通过以动态方式调整组件的可见性,可以创建支持 _可变数量输出_ 的 Gradio 演示。这是一个非常简单的例子,其中输出文本框的数量由输入滑块控制: + +例如: + +$code_variable_outputs +$demo_variable_outputs + +## 分开定义和渲染组件 (Defining and Rendering Components Separately) + +在某些情况下,您可能希望在实际渲染 UI 之前定义组件。例如,您可能希望在相应的 `gr.Textbox` 输入上方显示示例部分,使用 `gr.Examples`。由于 `gr.Examples` 需要一个参数作为输入组件对象,您需要先定义输入组件,然后在定义 `gr.Examples` 对象之后再渲染它。 + +解决方法是在 `gr.Blocks()` 范围之外定义 `gr.Textbox`,并在 UI 中想要放置它的位置使用组件的 `.render()` 方法。 + +这是一个完整的代码示例: + +```python +input_textbox = gr.Textbox() + +with gr.Blocks() as demo: + gr.Examples(["hello", "bonjour", "merhaba"], input_textbox) + input_textbox.render() +``` diff --git a/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/03_state-in-blocks.md b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/03_state-in-blocks.md new file mode 100644 index 0000000000000000000000000000000000000000..6c3db69701529e5fa983f9e0700ac4939bd52728 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/03_state-in-blocks.md @@ -0,0 +1,30 @@ +# 分块状态 (State in 
Blocks) + +我们已经介绍了[接口状态](https://gradio.app/interface-state),这篇指南将介绍分块状态,它的工作原理大致相同。 + +## 全局状态 (Global State) + +分块中的全局状态与接口中的全局状态相同。在函数调用外创建的任何变量都是在所有用户之间共享的引用。 + +## 会话状态 (Session State) + +Gradio 在分块应用程序中同样支持会话**状态**,即在页面会话中跨多次提交保持的数据。需要再次强调,会话数据*不会*在模型的不同用户之间共享。要在会话状态中存储数据,需要完成以下三个步骤: + +1. 创建一个 `gr.State()` 对象。如果此可状态对象有一个默认值,请将其传递给构造函数。 +2. 在事件监听器中,将 `State` 对象作为输入和输出。 +3. 在事件监听器函数中,将变量添加到输入参数和返回值中。 + +让我们来看一个猜词游戏的例子。 + +$code_hangman +$demo_hangman + +让我们看看在这个游戏中如何完成上述的 3 个步骤: + +1. 我们将已使用的字母存储在 `used_letters_var` 中。在 `State` 的构造函数中,将其初始值设置为空列表`[]`。 +2. 在 `btn.click()` 中,我们在输入和输出中都引用了 `used_letters_var`。 +3. 在 `guess_letter` 中,我们将此 `State` 的值传递给 `used_letters`,然后在返回语句中返回更新后的该 `State` 的值。 + +对于更复杂的应用程序,您可能会在一个单独的分块应用程序中使用许多存储会话状态的 `State` 变量。 + +在[文档](https://gradio.app/docs#state)中了解更多关于 `State` 的信息。 diff --git a/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/04_custom-CSS-and-JS.md b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/04_custom-CSS-and-JS.md new file mode 100644 index 0000000000000000000000000000000000000000..ebdec040fcb143d1801b9f81299274fbb82746e1 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/04_custom-CSS-and-JS.md @@ -0,0 +1,58 @@ +# 自定义的 JS 和 CSS + +本指南介绍了如何更灵活地为 Blocks 添加样式,并添加 JavaScript 代码到事件监听器中。 + +**警告**:在自定义的 JS 和 CSS 中使用查询选择器不能保证能在所有 Gradio 版本中正常工作,因为 Gradio 的 HTML DOM 可能会发生变化。我们建议谨慎使用查询选择器。 + +## 自定义的 CSS + +Gradio 主题是自定义应用程序外观和感觉的最简单方式。您可以从各种主题中进行选择,或者创建自己的主题。要实现这一点,请将 `theme=` kwarg 传递给 `Blocks` 构造函数。例如: + +```python +with gr.Blocks(theme=gr.themes.Glass()): + ... +``` + +Gradio 自带一套预构建的主题,您可以从 `gr.themes.*` 中加载这些主题。您可以扩展这些主题,或者从头开始创建自己的主题 - 有关更多详细信息,请参阅[主题指南](/theming-guide)。 + +要增加附加的样式能力,您可以使用 `css=` kwarg 将任何 CSS 传递给您的应用程序。 + +Gradio 应用程序的基类是 `gradio-container`,因此下面是一个示例,用于更改 Gradio 应用程序的背景颜色: + +```python +with gr.Blocks(css=".gradio-container {background-color: red}") as demo: + ... 
+``` + +如果您想在您的 CSS 中引用外部文件,请使用 `"file="` 作为文件路径的前缀(可以是相对路径或绝对路径),例如: + +```python +with gr.Blocks(css=".gradio-container {background: url('file=clouds.jpg')}") as demo: + ... +``` + +您还可以将 CSS 文件的文件路径传递给 `css` 参数。 + +## `elem_id` 和 `elem_classes` 参数 + +您可以使用 `elem_id` 来为任何组件添加 HTML 元素 `id`,并使用 `elem_classes` 添加一个类或类列表。这将使您能够更轻松地使用 CSS 选择元素。这种方法更有可能在 Gradio 版本之间保持稳定,因为内置的类名或 id 可能会发生变化(但正如上面的警告中所提到的,如果您使用自定义 CSS,我们不能保证在 Gradio 版本之间完全兼容,因为 DOM 元素本身可能会发生变化)。 + +```python +css = """ +#warning {background-color: #FFCCCB} +.feedback textarea {font-size: 24px !important} +""" + +with gr.Blocks(css=css) as demo: + box1 = gr.Textbox(value="Good Job", elem_classes="feedback") + box2 = gr.Textbox(value="Failure", elem_id="warning", elem_classes="feedback") +``` + +CSS `#warning` 规则集仅针对第二个文本框,而 `.feedback` 规则集将同时作用于两个文本框。请注意,在针对类时,您可能需要使用 `!important` 选择器来覆盖默认的 Gradio 样式。 + +## 自定义的 JS + +事件监听器具有 `_js` 参数,可以接受 JavaScript 函数作为字符串,并像 Python 事件监听器函数一样处理它。您可以传递 JavaScript 函数和 Python 函数(在这种情况下,先运行 JavaScript 函数),或者仅传递 JavaScript(并将 Python 的 `fn` 设置为 `None`)。请查看下面的代码: + +$code_blocks_js_methods +$demo_blocks_js_methods diff --git a/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/05_using-blocks-like-functions.md b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/05_using-blocks-like-functions.md new file mode 100644 index 0000000000000000000000000000000000000000..48752a9cf9009e191cc60e045eb93d3a6009689c --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/03_building-with-blocks/05_using-blocks-like-functions.md @@ -0,0 +1,90 @@ +# 使用 Gradio 块像函数一样 + +Tags: TRANSLATION, HUB, SPACES + +**先决条件**: 本指南是在块介绍的基础上构建的。请确保[先阅读该指南](https://gradio.app/quickstart/#blocks-more-flexibility-and-control)。 + +## 介绍 + +你知道吗,除了作为一个全栈机器学习演示,Gradio 块应用其实也是一个普通的 Python 函数!? 
+ +这意味着如果你有一个名为 `demo` 的 Gradio 块(或界面)应用,你可以像使用任何 Python 函数一样使用 `demo`。 + +所以,像 `output = demo("Hello", "friend")` 这样的操作会在输入为 "Hello" 和 "friend" 的情况下运行 `demo` 中定义的第一个事件,并将其存储在变量 `output` 中。 + +如果以上内容让你打瞌睡 🥱,请忍耐一下!通过将应用程序像函数一样使用,你可以轻松地组合 Gradio 应用。 +接下来的部分将展示如何实现。 + +## 将块视为函数 + +假设我们有一个将英文文本翻译为德文文本的演示块。 + +$code_english_translator + +我已经将它托管在 Hugging Face Spaces 上的 [gradio/english_translator](https://huggingface.co/spaces/gradio/english_translator)。 + +你也可以在下面看到演示: + +$demo_english_translator + +现在,假设你有一个生成英文文本的应用程序,但你还想额外生成德文文本。 + +你可以选择: + +1. 将我的英德翻译的源代码复制粘贴到你的应用程序中。 + +2. 在你的应用程序中加载我的英德翻译,并将其当作普通的 Python 函数处理。 + +选项 1 从技术上讲总是有效的,但它经常引入不必要的复杂性。 + +选项 2 允许你借用所需的功能,而不会过于紧密地耦合我们的应用程序。 + +你只需要在源文件中调用 `Blocks.load` 类方法即可。 +之后,你就可以像使用普通的 Python 函数一样使用我的翻译应用程序了! + +下面的代码片段和演示展示了如何使用 `Blocks.load`。 + +请注意,变量 `english_translator` 是我的英德翻译应用程序,但它在 `generate_text` 中像普通函数一样使用。 + +$code_generate_english_german + +$demo_generate_english_german + +## 如何控制使用应用程序中的哪个函数 + +如果你正在加载的应用程序定义了多个函数,你可以使用 `fn_index` 和 `api_name` 参数指定要使用的函数。 + +在英德演示的代码中,你会看到以下代码行: + +translate_btn.click(translate, inputs=english, outputs=german, api_name="translate-to-german") + +这个 `api_name` 在我们的应用程序中给这个函数一个唯一的名称。你可以使用这个名称告诉 Gradio 你想使用 +上游空间中的哪个函数: + +english_generator(text, api_name="translate-to-german")[0]["generated_text"] + +你也可以使用 `fn_index` 参数。 +假设我的应用程序还定义了一个英语到西班牙语的翻译函数。 +为了在我们的文本生成应用程序中使用它,我们将使用以下代码: + +english_generator(text, fn_index=1)[0]["generated_text"] + +Gradio 空间中的函数是从零开始索引的,所以西班牙语翻译器将是我的空间中的第二个函数, +因此你会使用索引 1。 + +## 结语 + +我们展示了将块应用视为普通 Python 函数的方法,这有助于在不同的应用程序之间组合功能。 +任何块应用程序都可以被视为一个函数,但一个强大的模式是在将其视为函数之前, +在[自己的应用程序中加载](https://huggingface.co/spaces)托管在[Hugging Face Spaces](https://huggingface.co/spaces)上的应用程序。 +您也可以加载托管在[Hugging Face Model Hub](https://huggingface.co/models)上的模型——有关示例,请参阅[使用 Hugging Face 集成](/using_hugging_face_integrations)指南。 + +### 开始构建!⚒️ + +## Parting Remarks + +我们展示了如何将 Blocks 应用程序视为常规 Python 函数,以便在不同的应用程序之间组合功能。 +任何 Blocks 
应用程序都可以被视为函数,但是一种有效的模式是在将其视为自己应用程序的函数之前,先`加载`托管在[Hugging Face Spaces](https://huggingface.co/spaces)上的应用程序。 +您还可以加载托管在[Hugging Face Model Hub](https://huggingface.co/models)上的模型-请参见[使用 Hugging Face 集成指南](/using_hugging_face_integrations)中的示例。 + +### Happy building! ⚒️ diff --git a/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/01_using-hugging-face-integrations.md b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/01_using-hugging-face-integrations.md new file mode 100644 index 0000000000000000000000000000000000000000..b928e92a3c4417bb0c982fa860d3c75fcd51c997 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/01_using-hugging-face-integrations.md @@ -0,0 +1,136 @@ +# 使用 Hugging Face 集成 + +相关空间:https://huggingface.co/spaces/gradio/helsinki_translation_en_es +标签:HUB,SPACES,EMBED + +由 Omar Sanseviero 贡献🦙 + +## 介绍 + +Hugging Face Hub 是一个集成平台,拥有超过 190,000 个[模型](https://huggingface.co/models),32,000 个[数据集](https://huggingface.co/datasets)和 40,000 个[演示](https://huggingface.co/spaces),也被称为 Spaces。虽然 Hugging Face 以其🤗 transformers 和 diffusers 库而闻名,但 Hub 还支持许多机器学习库,如 PyTorch,TensorFlow,spaCy 等,涵盖了从计算机视觉到强化学习等各个领域。 + +Gradio 拥有多个功能,使其非常容易利用 Hub 上的现有模型和 Spaces。本指南将介绍这些功能。 + +## 使用 `pipeline` 进行常规推理 + +首先,让我们构建一个简单的界面,将英文翻译成西班牙文。在赫尔辛基大学共享的一千多个模型中,有一个[现有模型](https://huggingface.co/Helsinki-NLP/opus-mt-en-es),名为 `opus-mt-en-es`,可以正好做到这一点! 
+ +🤗 transformers 库有一个非常易于使用的抽象层,[`pipeline()`](https://huggingface.co/docs/transformers/v4.16.2/en/main_classes/pipelines#transformers.pipeline)处理大部分复杂代码,为常见任务提供简单的 API。通过指定任务和(可选)模型,您可以使用几行代码使用现有模型: + +```python +import gradio as gr + +from transformers import pipeline + +pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es") + +def predict(text): + return pipe(text)[0]["translation_text"] + +demo = gr.Interface( + fn=predict, + inputs='text', + outputs='text', +) + +demo.launch() +``` + +但是,`gradio` 实际上使将 `pipeline` 转换为演示更加容易,只需使用 `gradio.Interface.from_pipeline` 方法,无需指定输入和输出组件: + +```python +from transformers import pipeline +import gradio as gr + +pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es") + +demo = gr.Interface.from_pipeline(pipe) +demo.launch() +``` + +上述代码生成了以下界面,您可以在浏览器中直接尝试: + + + +## Using Hugging Face Inference API + +Hugging Face 提供了一个名为[Inference API](https://huggingface.co/inference-api)的免费服务,允许您向 Hub 中的模型发送 HTTP 请求。对于基于 transformers 或 diffusers 的模型,API 的速度可以比自己运行推理快 2 到 10 倍。该 API 是免费的(受速率限制),您可以在想要在生产中使用时切换到专用的[推理端点](https://huggingface.co/pricing)。 + +让我们尝试使用推理 API 而不是自己加载模型的方式进行相同的演示。鉴于 Inference API 支持的 Hugging Face 模型,Gradio 可以自动推断出预期的输入和输出,并进行底层服务器调用,因此您不必担心定义预测函数。以下是代码示例! 
+ +```python +import gradio as gr + +demo = gr.load("Helsinki-NLP/opus-mt-en-es", src="models") + +demo.launch() +``` + +请注意,我们只需指定模型名称并说明 `src` 应为 `models`(Hugging Face 的 Model Hub)。由于您不会在计算机上加载模型,因此无需安装任何依赖项(除了 `gradio`)。 + +您可能会注意到,第一次推理大约需要 20 秒。这是因为推理 API 正在服务器中加载模型。之后您会获得一些好处: + +- 推理速度更快。 +- 服务器缓存您的请求。 +- 您获得内置的自动缩放功能。 + +## 托管您的 Gradio 演示 + +[Hugging Face Spaces](https://hf.co/spaces)允许任何人免费托管其 Gradio 演示,上传 Gradio 演示只需几分钟。您可以前往[hf.co/new-space](https://huggingface.co/new-space),选择 Gradio SDK,创建一个 `app.py` 文件,完成!您将拥有一个可以与任何人共享的演示。要了解更多信息,请阅读[此指南以使用网站在 Hugging Face Spaces 上托管](https://huggingface.co/blog/gradio-spaces)。 + +或者,您可以通过使用[huggingface_hub client library](https://huggingface.co/docs/huggingface_hub/index)库来以编程方式创建一个 Space。这是一个示例: + +```python +from huggingface_hub import ( + create_repo, + get_full_repo_name, + upload_file, +) +create_repo(name=target_space_name, token=hf_token, repo_type="space", space_sdk="gradio") +repo_name = get_full_repo_name(model_id=target_space_name, token=hf_token) +file_url = upload_file( + path_or_fileobj="file.txt", + path_in_repo="app.py", + repo_id=repo_name, + repo_type="space", + token=hf_token, +) +``` + +在这里,`create_repo` 使用特定帐户的 Write Token 在特定帐户下创建一个带有目标名称的 gradio repo。`repo_name` 获取相关存储库的完整存储库名称。最后,`upload_file` 将文件上传到存储库中,并将其命名为 `app.py`。 + +## 在其他网站上嵌入您的 Space 演示 + +在本指南中,您已经看到了许多嵌入的 Gradio 演示。您也可以在自己的网站上这样做!第一步是创建一个包含您想展示的演示的 Hugging Face Space。然后,[按照此处的步骤将 Space 嵌入到您的网站上](/sharing-your-app/#embedding-hosted-spaces)。 + +## 从 Spaces 加载演示 + +您还可以在 Hugging Face Spaces 上使用和混合现有的 Gradio 演示。例如,您可以将两个现有的 Gradio 演示放在单独的选项卡中并创建一个新的演示。您可以在本地运行此新演示,或将其上传到 Spaces,为混合和创建新的演示提供无限可能性! 
+ +以下是一个完全实现此目标的示例: + +```python +import gradio as gr + +with gr.Blocks() as demo: + with gr.Tab("Translate to Spanish"): + gr.load("gradio/helsinki_translation_en_es", src="spaces") + with gr.Tab("Translate to French"): + gr.load("abidlabs/en2fr", src="spaces") + +demo.launch() +``` + +请注意,我们使用了 `gr.load()`,这与使用推理 API 加载模型所使用的方法相同。但是,在这里,我们指定 `src` 为 `spaces`(Hugging Face Spaces)。 + +## 小结 + +就是这样!让我们回顾一下 Gradio 和 Hugging Face 共同工作的各种方式: + +1. 您可以使用 `from_pipeline()` 将 `transformers` pipeline 转换为 Gradio 演示 +2. 您可以使用 `gr.load()` 轻松地围绕推理 API 构建演示,而无需加载模型 +3. 您可以在 Hugging Face Spaces 上托管您的 Gradio 演示,可以使用 GUI 或完全使用 Python。 +4. 您可以将托管在 Hugging Face Spaces 上的 Gradio 演示嵌入到自己的网站上。 +5. 您可以使用 `gr.load()` 从 Hugging Face Spaces 加载演示,以重新混合和创建新的 Gradio 演示。 + +🤗 diff --git a/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/Gradio-and-Comet.md b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/Gradio-and-Comet.md new file mode 100644 index 0000000000000000000000000000000000000000..fd0aea7db0a696ce14df75b52713a5c643194b05 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/Gradio-and-Comet.md @@ -0,0 +1,269 @@ +# 使用 Gradio 和 Comet + +Tags: COMET, SPACES +由 Comet 团队贡献 + +## 介绍 + +在这个指南中,我们将展示您可以如何使用 Gradio 和 Comet。我们将介绍使用 Comet 和 Gradio 的基本知识,并向您展示如何利用 Gradio 的高级功能,如 [使用 iFrames 进行嵌入](https://www.gradio.app/sharing-your-app/#embedding-with-iframes) 和 [状态](https://www.gradio.app/docs/#state) 来构建一些令人惊叹的模型评估工作流程。 + +下面是本指南涵盖的主题列表。 + +1. 将 Gradio UI 记录到您的 Comet 实验中 +2. 直接将 Gradio 应用程序嵌入到您的 Comet 项目中 +3. 直接将 Hugging Face Spaces 嵌入到您的 Comet 项目中 +4. 将 Gradio 应用程序的模型推理记录到 Comet 中 + +## 什么是 Comet? + +[Comet](https://www.comet.com?utm_source=gradio&utm_medium=referral&utm_campaign=gradio-integration&utm_content=gradio-docs) 是一个 MLOps 平台,旨在帮助数据科学家和团队更快地构建更好的模型!Comet 提供工具来跟踪、解释、管理和监控您的模型,集中在一个地方!它可以与 Jupyter 笔记本和脚本配合使用,最重要的是,它是 100% 免费的! 
+ +## 设置 + +首先,安装运行这些示例所需的依赖项 + +```shell +pip install comet_ml torch torchvision transformers gradio shap requests Pillow +``` + +接下来,您需要[注册一个 Comet 账户](https://www.comet.com/signup?utm_source=gradio&utm_medium=referral&utm_campaign=gradio-integration&utm_content=gradio-docs)。一旦您设置了您的账户,[获取您的 API 密钥](https://www.comet.com/docs/v2/guides/getting-started/quickstart/#get-an-api-key?utm_source=gradio&utm_medium=referral&utm_campaign=gradio-integration&utm_content=gradio-docs) 并配置您的 Comet 凭据 + +如果您将这些示例作为脚本运行,您可以将您的凭据导出为环境变量 + +```shell +export COMET_API_KEY="<您的 API 密钥>" +export COMET_WORKSPACE="<您的工作空间名称>" +export COMET_PROJECT_NAME="<您的项目名称>" +``` + +或者将它们设置在您的工作目录中的 `.comet.config` 文件中。您的文件应按以下方式格式化。 + +```shell +[comet] +api_key=<您的 API 密钥> +workspace=<您的工作空间名称> +project_name=<您的项目名称> +``` + +如果您使用提供的 Colab Notebooks 运行这些示例,请在开始 Gradio UI 之前运行带有以下片段的单元格。运行此单元格可以让您交互式地将 API 密钥添加到笔记本中。 + +```python +import comet_ml +comet_ml.init() +``` + +## 1. 将 Gradio UI 记录到您的 Comet 实验中 + +[![在 Colab 中打开](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/comet-examples/blob/master/integrations/model-evaluation/gradio/notebooks/Gradio_and_Comet.ipynb) + +在这个例子中,我们将介绍如何将您的 Gradio 应用程序记录到 Comet,并使用 Gradio 自定义面板与其进行交互。 + +我们先通过使用 `resnet18` 构建一个简单的图像分类示例。 + +```python +import comet_ml + +import requests +import torch +from PIL import Image +from torchvision import transforms + +torch.hub.download_url_to_file("https://github.com/pytorch/hub/raw/master/images/dog.jpg", "dog.jpg") + +if torch.cuda.is_available(): + device = "cuda" +else: + device = "cpu" + +model = torch.hub.load("pytorch/vision:v0.6.0", "resnet18", pretrained=True).eval() +model = model.to(device) + +# 为 ImageNet 下载可读的标签。 +response = requests.get("https://git.io/JJkYN") +labels = response.text.split("\n") + + +def predict(inp): + inp = Image.fromarray(inp.astype("uint8"), "RGB") + inp = transforms.ToTensor()(inp).unsqueeze(0) + with torch.no_grad(): + 
prediction = torch.nn.functional.softmax(model(inp.to(device))[0], dim=0) + return {labels[i]: float(prediction[i]) for i in range(1000)} + + +inputs = gr.Image() +outputs = gr.Label(num_top_classes=3) + +io = gr.Interface( + fn=predict, inputs=inputs, outputs=outputs, examples=["dog.jpg"] +) +io.launch(inline=False, share=True) + +experiment = comet_ml.Experiment() +experiment.add_tag("image-classifier") + +io.integrate(comet_ml=experiment) +``` + +此片段中的最后一行将将 Gradio 应用程序的 URL 记录到您的 Comet 实验中。您可以在实验的文本选项卡中找到该 URL。 + + + +将 Gradio 面板添加到您的实验中,与应用程序进行交互。 + + + +## 2. 直接将 Gradio 应用程序嵌入到您的 Comet 项目中 + + + +如果您要长期托管 Gradio 应用程序,可以使用 Gradio Panel Extended 自定义面板进行嵌入 UI。 + +转到您的 Comet 项目页面,转到面板选项卡。单击“+ 添加”按钮以打开面板搜索页面。 + +adding-panels + +接下来,在公共面板部分搜索 Gradio Panel Extended 并单击“添加”。 + +gradio-panel-extended + +添加面板后,单击“编辑”以访问面板选项页面,并粘贴您的 Gradio 应用程序的 URL。 + +![Edit-Gradio-Panel-Options](https://user-images.githubusercontent.com/7529846/214573001-23814b5a-ca65-4ace-a8a5-b27cdda70f7a.gif) + +Edit-Gradio-Panel-URL + +## 3. 直接将 Hugging Face Spaces 嵌入到您的 Comet 项目中 + + + +您还可以使用 Hugging Face Spaces 面板将托管在 Hugging Faces Spaces 中的 Gradio 应用程序嵌入到您的 Comet 项目中。 + +转到 Comet 项目页面,转到面板选项卡。单击“+添加”按钮以打开面板搜索页面。然后,在公共面板部分搜索 Hugging Face Spaces 面板并单击“添加”。 + +huggingface-spaces-panel + +添加面板后,单击“编辑”以访问面板选项页面,并粘贴您的 Hugging Face Space 路径,例如 `pytorch/ResNet` + +Edit-HF-Space + +## 4. 
记录模型推断结果到 Comet + + + +[![在 Colab 中打开](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/comet-ml/comet-examples/blob/master/integrations/model-evaluation/gradio/notebooks/Logging_Model_Inferences_with_Comet_and_Gradio.ipynb) + +在前面的示例中,我们演示了通过 Comet UI 与 Gradio 应用程序交互的各种方法。此外,您还可以将 Gradio 应用程序的模型推断(例如 SHAP 图)记录到 Comet 中。 + +在以下代码段中,我们将记录来自文本生成模型的推断。我们可以使用 Gradio 的[State](https://www.gradio.app/docs/#state)对象在多次推断调用之间保持实验的持久性。这将使您能够将多个模型推断记录到单个实验中。 + +```python +import comet_ml +import gradio as gr +import shap +import torch +from transformers import AutoModelForCausalLM, AutoTokenizer + +if torch.cuda.is_available(): + device = "cuda" +else: + device = "cpu" + +MODEL_NAME = "gpt2" + +model = AutoModelForCausalLM.from_pretrained(MODEL_NAME) + +# set model decoder to true +model.config.is_decoder = True +# set text-generation params under task_specific_params +model.config.task_specific_params["text-generation"] = { + "do_sample": True, + "max_length": 50, + "temperature": 0.7, + "top_k": 50, + "no_repeat_ngram_size": 2, +} +model = model.to(device) + +tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME) +explainer = shap.Explainer(model, tokenizer) + + +def start_experiment(): + """Returns an APIExperiment object that is thread safe + and can be used to log inferences to a single Experiment + """ + try: + api = comet_ml.API() + workspace = api.get_default_workspace() + project_name = comet_ml.config.get_config()["comet.project_name"] + + experiment = comet_ml.APIExperiment( + workspace=workspace, project_name=project_name + ) + experiment.log_other("Created from", "gradio-inference") + + message = f"Started Experiment: [{experiment.name}]({experiment.url})" + return (experiment, message) + + except Exception as e: + return None, None + + +def predict(text, state, message): + experiment = state + + shap_values = explainer([text]) + plot = shap.plots.text(shap_values, display=False) + + if experiment is not None: + 
experiment.log_other("message", message) + experiment.log_html(plot) + + return plot + + +with gr.Blocks() as demo: + start_experiment_btn = gr.Button("Start New Experiment") + experiment_status = gr.Markdown() + + # Log a message to the Experiment to provide more context + experiment_message = gr.Textbox(label="Experiment Message") + experiment = gr.State() + + input_text = gr.Textbox(label="Input Text", lines=5, interactive=True) + submit_btn = gr.Button("Submit") + + output = gr.HTML(interactive=True) + + start_experiment_btn.click( + start_experiment, outputs=[experiment, experiment_status] + ) + submit_btn.click( + predict, inputs=[input_text, experiment, experiment_message], outputs=[output] + ) +``` + +该代码段中的推断结果将保存在实验的 HTML 选项卡中。 + + + +## 结论 + +希望您对本指南有所裨益,并能为您构建出色的 Comet 和 Gradio 模型评估工作流程提供一些启示。 + +## 如何在 Comet 组织上贡献 Gradio 演示 + +- 在 Hugging Face 上创建帐号[此处](https://huggingface.co/join)。 +- 在用户名下添加 Gradio 演示,请参阅[此处](https://huggingface.co/course/chapter9/4?fw=pt)以设置 Gradio 演示。 +- 请求加入 Comet 组织[此处](https://huggingface.co/Comet)。 + +## 更多资源 + +- [Comet 文档](https://www.comet.com/docs/v2/?utm_source=gradio&utm_medium=referral&utm_campaign=gradio-integration&utm_content=gradio-docs) diff --git a/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/Gradio-and-ONNX-on-Hugging-Face.md b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/Gradio-and-ONNX-on-Hugging-Face.md new file mode 100644 index 0000000000000000000000000000000000000000..a8e8d949889f5b97ea50400f52d9991ac4983ed5 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/Gradio-and-ONNX-on-Hugging-Face.md @@ -0,0 +1,142 @@ +# Gradio 和 ONNX 在 Hugging Face 上 + +Related spaces: https://huggingface.co/spaces/onnx/EfficientNet-Lite4 +Tags: ONNX,SPACES +由 Gradio 和 ONNX 团队贡献 + +## 介绍 + +在这个指南中,我们将为您介绍以下内容: + +- ONNX、ONNX 模型仓库、Gradio 和 Hugging Face Spaces 的介绍 +- 如何为 EfficientNet-Lite4 设置 Gradio 演示 +- 如何为 Hugging Face 上的 ONNX 组织贡献自己的 Gradio 演示 + 
+下面是一个 ONNX 模型的示例:在下面尝试 EfficientNet-Lite4 演示。 + + + +## ONNX 模型仓库是什么? + +Open Neural Network Exchange([ONNX](https://onnx.ai/))是一种表示机器学习模型的开放标准格式。ONNX 由一个实现了该格式的合作伙伴社区支持,该社区将其实施到许多框架和工具中。例如,如果您在 TensorFlow 或 PyTorch 中训练了一个模型,您可以轻松地将其转换为 ONNX,然后使用类似 ONNX Runtime 的引擎 / 编译器在各种设备上运行它。 + +[ONNX 模型仓库](https://github.com/onnx/models)是由社区成员贡献的一组预训练的先进模型,格式为 ONNX。每个模型都附带了用于模型训练和运行推理的 Jupyter 笔记本。这些笔记本以 Python 编写,并包含到训练数据集的链接,以及描述模型架构的原始论文的参考文献。 + +## Hugging Face Spaces 和 Gradio 是什么? + +### Gradio + +Gradio 可让用户使用 Python 代码将其机器学习模型演示为 Web 应用程序。Gradio 将 Python 函数封装到用户界面中,演示可以在 jupyter 笔记本、colab 笔记本中启动,并可以嵌入到您自己的网站上,并在 Hugging Face Spaces 上免费托管。 + +在此处开始[https://gradio.app/getting_started](https://gradio.app/getting_started) + +### Hugging Face Spaces + +Hugging Face Spaces 是 Gradio 演示的免费托管选项。Spaces 提供了 3 种 SDK 选项:Gradio、Streamlit 和静态 HTML 演示。Spaces 可以是公共的或私有的,工作流程与 github repos 类似。目前 Hugging Face 上有 2000 多个 Spaces。在此处了解更多关于 Spaces 的信息[https://huggingface.co/spaces/launch](https://huggingface.co/spaces/launch)。 + +### Hugging Face 模型 + +Hugging Face 模型中心还支持 ONNX 模型,并且可以通过[ONNX 标签](https://huggingface.co/models?library=onnx&sort=downloads)对 ONNX 模型进行筛选 + +## Hugging Face 是如何帮助 ONNX 模型仓库的? + +ONNX 模型仓库中有许多 Jupyter 笔记本供用户测试模型。以前,用户需要自己下载模型并在本地运行这些笔记本测试。有了 Hugging Face,测试过程可以更简单和用户友好。用户可以在 Hugging Face Spaces 上轻松尝试 ONNX 模型仓库中的某个模型,并使用 ONNX Runtime 运行由 Gradio 提供支持的快速演示,全部在云端进行,无需在本地下载任何内容。请注意,ONNX 有各种运行时,例如[ONNX Runtime](https://github.com/microsoft/onnxruntime)、[MXNet](https://github.com/apache/incubator-mxnet)等 + +## ONNX Runtime 的作用是什么? 
+ +ONNX Runtime 是一个跨平台的推理和训练机器学习加速器。它使得在 Hugging Face 上使用 ONNX 模型仓库中的模型进行实时 Gradio 演示成为可能。 + +ONNX Runtime 可以实现更快的客户体验和更低的成本,支持来自 PyTorch 和 TensorFlow/Keras 等深度学习框架以及 scikit-learn、LightGBM、XGBoost 等传统机器学习库的模型。ONNX Runtime 与不同的硬件、驱动程序和操作系统兼容,并通过利用适用的硬件加速器以及图形优化和转换提供最佳性能。有关更多信息,请参阅[官方网站](https://onnxruntime.ai/)。 + +## 为 EfficientNet-Lite4 设置 Gradio 演示 + +EfficientNet-Lite 4 是 EfficientNet-Lite 系列中最大和最准确的模型。它是一个仅使用整数量化的模型,能够在所有 EfficientNet 模型中提供最高的准确率。在 Pixel 4 CPU 上以实时方式运行(例如 30ms/ 图像)时,可以实现 80.4%的 ImageNet top-1 准确率。要了解更多信息,请阅读[模型卡片](https://github.com/onnx/models/tree/main/vision/classification/efficientnet-lite4) + +在这里,我们将演示如何使用 Gradio 为 EfficientNet-Lite4 设置示例演示 + +首先,我们导入所需的依赖项并下载和载入来自 ONNX 模型仓库的 efficientnet-lite4 模型。然后从 labels_map.txt 文件加载标签。接下来,我们设置预处理函数、加载用于推理的模型并设置推理函数。最后,将推理函数封装到 Gradio 接口中,供用户进行交互。下面是完整的代码。 + +```python +import numpy as np +import math +import matplotlib.pyplot as plt +import cv2 +import json +import gradio as gr +from huggingface_hub import hf_hub_download +from onnx import hub +import onnxruntime as ort + +# 从ONNX模型仓库加载ONNX模型 +model = hub.load("efficientnet-lite4") +# 加载标签文本文件 +labels = json.load(open("labels_map.txt", "r")) + +# 通过将图像从中心调整大小并裁剪到224x224来设置图像文件的尺寸 +def pre_process_edgetpu(img, dims): + output_height, output_width, _ = dims + img = resize_with_aspectratio(img, output_height, output_width, inter_pol=cv2.INTER_LINEAR) + img = center_crop(img, output_height, output_width) + img = np.asarray(img, dtype='float32') + # 将jpg像素值从[0 - 255]转换为浮点数组[-1.0 - 1.0] + img -= [127.0, 127.0, 127.0] + img /= [128.0, 128.0, 128.0] + return img + +# 使用等比例缩放调整图像尺寸 +def resize_with_aspectratio(img, out_height, out_width, scale=87.5, inter_pol=cv2.INTER_LINEAR): + height, width, _ = img.shape + new_height = int(100. * out_height / scale) + new_width = int(100. 
* out_width / scale) + if height > width: + w = new_width + h = int(new_height * height / width) + else: + h = new_height + w = int(new_width * width / height) + img = cv2.resize(img, (w, h), interpolation=inter_pol) + return img + +# crops the image around the center based on given height and width +def center_crop(img, out_height, out_width): + height, width, _ = img.shape + left = int((width - out_width) / 2) + right = int((width + out_width) / 2) + top = int((height - out_height) / 2) + bottom = int((height + out_height) / 2) + img = img[top:bottom, left:right] + return img + + +sess = ort.InferenceSession(model) + +def inference(img): + img = cv2.imread(img) + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + + img = pre_process_edgetpu(img, (224, 224, 3)) + + img_batch = np.expand_dims(img, axis=0) + + results = sess.run(["Softmax:0"], {"images:0": img_batch})[0] + result = reversed(results[0].argsort()[-5:]) + resultdic = {} + for r in result: + resultdic[labels[str(r)]] = float(results[0][r]) + return resultdic + +title = "EfficientNet-Lite4" +description = "EfficientNet-Lite 4是最大的变体,也是EfficientNet-Lite模型集合中最准确的。它是一个仅包含整数的量化模型,具有所有EfficientNet模型中最高的准确度。在Pixel 4 CPU上,它实现了80.4%的ImageNet top-1准确度,同时仍然可以实时运行(例如30ms/图像)。" +examples = [['catonnx.jpg']] +gr.Interface(inference, gr.Image(type="filepath"), "label", title=title, description=description, examples=examples).launch() +``` + +## 如何使用 ONNX 模型在 HF Spaces 上贡献 Gradio 演示 + +- 将模型添加到[onnx model zoo](https://github.com/onnx/models/blob/main/.github/PULL_REQUEST_TEMPLATE.md) +- 在 Hugging Face 上创建一个账号[here](https://huggingface.co/join). +- 要查看还有哪些模型需要添加到 ONNX 组织中,请参阅[Models list](https://github.com/onnx/models#models)中的列表 +- 在您的用户名下添加 Gradio Demo,请参阅此[博文](https://huggingface.co/blog/gradio-spaces)以在 Hugging Face 上设置 Gradio Demo。 +- 请求加入 ONNX 组织[here](https://huggingface.co/onnx). 
+- 一旦获准,将模型从您的用户名下转移到 ONNX 组织 +- 在模型表中为模型添加徽章,在[Models list](https://github.com/onnx/models#models)中查看示例 diff --git a/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/Gradio-and-Wandb-Integration.md b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/Gradio-and-Wandb-Integration.md new file mode 100644 index 0000000000000000000000000000000000000000..337951096a0fb8c0b96c9900d15cb3f99cc9fdda --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/Gradio-and-Wandb-Integration.md @@ -0,0 +1,284 @@ +# Gradio and W&B Integration + +相关空间:https://huggingface.co/spaces/akhaliq/JoJoGAN +标签:WANDB, SPACES +由 Gradio 团队贡献 + +## 介绍 + +在本指南中,我们将引导您完成以下内容: + +- Gradio、Hugging Face Spaces 和 Wandb 的介绍 +- 如何使用 Wandb 集成为 JoJoGAN 设置 Gradio 演示 +- 如何在 Hugging Face 的 Wandb 组织中追踪实验并贡献您自己的 Gradio 演示 + +下面是一个使用 Wandb 跟踪训练和实验的模型示例,请在下方尝试 JoJoGAN 演示。 + + + +## 什么是 Wandb? + +Weights and Biases (W&B) 允许数据科学家和机器学习科学家在从训练到生产的每个阶段跟踪他们的机器学习实验。任何指标都可以对样本进行聚合,并在可自定义和可搜索的仪表板中显示,如下所示: + +Screen Shot 2022-08-01 at 5 54 59 PM + +## 什么是 Hugging Face Spaces 和 Gradio? + +### Gradio + +Gradio 让用户可以使用几行 Python 代码将其机器学习模型演示为 Web 应用程序。Gradio 将任何 Python 函数(例如机器学习模型的推断函数)包装成一个用户界面,这些演示可以在 jupyter 笔记本、colab 笔记本中启动,也可以嵌入到您自己的网站中,免费托管在 Hugging Face Spaces 上。 + +在这里开始 [here](https://gradio.app/getting_started) + +### Hugging Face Spaces + +Hugging Face Spaces 是 Gradio 演示的免费托管选项。Spaces 有 3 个 SDK 选项:Gradio、Streamlit 和静态 HTML 演示。Spaces 可以是公共的或私有的,工作流程类似于 github 存储库。目前在 Hugging Face 上有 2000 多个 Spaces。了解更多关于 Spaces 的信息 [here](https://huggingface.co/spaces/launch)。 + +## 为 JoJoGAN 设置 Gradio 演示 + +现在,让我们引导您如何在自己的环境中完成此操作。我们假设您对 W&B 和 Gradio 还不太了解,只是为了本教程的目的。 + +让我们开始吧! + +1. 创建 W&B 账号 + + 如果您还没有 W&B 账号,请按照[这些快速说明](https://app.wandb.ai/login)创建免费账号。这不应该超过几分钟的时间。一旦完成(或者如果您已经有一个账户),接下来,我们将运行一个快速的 colab。 + +2. 
打开 Colab 安装 Gradio 和 W&B + + 我们将按照 JoJoGAN 存储库中提供的 colab 进行操作,稍作修改以更有效地使用 Wandb 和 Gradio。 + + [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/mchong6/JoJoGAN/blob/main/stylize.ipynb) + + 在顶部安装 Gradio 和 Wandb: + + ```sh + + pip install gradio wandb + ``` + +3. 微调 StyleGAN 和 W&B 实验跟踪 + + 下一步将打开一个 W&B 仪表板,以跟踪实验,并显示一个 Gradio 演示提供的预训练模型,您可以从下拉菜单中选择。这是您需要的代码: + + ```python + + alpha = 1.0 + alpha = 1-alpha + + preserve_color = True + num_iter = 100 + log_interval = 50 + + samples = [] + column_names = ["Reference (y)", "Style Code(w)", "Real Face Image(x)"] + + wandb.init(project="JoJoGAN") + config = wandb.config + config.num_iter = num_iter + config.preserve_color = preserve_color + wandb.log( + {"Style reference": [wandb.Image(transforms.ToPILImage()(target_im))]}, + step=0) + + # 加载判别器用于感知损失 + discriminator = Discriminator(1024, 2).eval().to(device) + ckpt = torch.load('models/stylegan2-ffhq-config-f.pt', map_location=lambda storage, loc: storage) + discriminator.load_state_dict(ckpt["d"], strict=False) + + # 重置生成器 + del generator + generator = deepcopy(original_generator) + + g_optim = optim.Adam(generator.parameters(), lr=2e-3, betas=(0, 0.99)) + + # 用于生成一族合理真实图像-> 假图像的更换图层 + if preserve_color: + id_swap = [9,11,15,16,17] + else: + id_swap = list(range(7, generator.n_latent)) + + for idx in tqdm(range(num_iter)): + mean_w = generator.get_latent(torch.randn([latents.size(0), latent_dim]).to(device)).unsqueeze(1).repeat(1, generator.n_latent, 1) + in_latent = latents.clone() + in_latent[:, id_swap] = alpha*latents[:, id_swap] + (1-alpha)*mean_w[:, id_swap] + + img = generator(in_latent, input_is_latent=True) + + with torch.no_grad(): + real_feat = discriminator(targets) + fake_feat = discriminator(img) + + loss = sum([F.l1_loss(a, b) for a, b in zip(fake_feat, real_feat)])/len(fake_feat) + + wandb.log({"loss": loss}, step=idx) + if idx % log_interval == 0: + generator.eval() + my_sample = 
generator(my_w, input_is_latent=True) + generator.train() + my_sample = transforms.ToPILImage()(utils.make_grid(my_sample, normalize=True, range=(-1, 1))) + wandb.log( + {"Current stylization": [wandb.Image(my_sample)]}, + step=idx) + table_data = [ + wandb.Image(transforms.ToPILImage()(target_im)), + wandb.Image(img), + wandb.Image(my_sample), + ] + samples.append(table_data) + + g_optim.zero_grad() + loss.backward() + g_optim.step() + + out_table = wandb.Table(data=samples, columns=column_names) + wandb.log({" 当前样本数 ": out_table}) + ``` + +4. 保存、下载和加载模型 + + 以下是如何保存和下载您的模型。 + + ```python + + from PIL import Image + import torch + torch.backends.cudnn.benchmark = True + from torchvision import transforms, utils + from util import * + import math + import random + import numpy as np + from torch import nn, autograd, optim + from torch.nn import functional as F + from tqdm import tqdm + import lpips + from model import * + from e4e_projection import projection as e4e_projection + + from copy import deepcopy + import imageio + + import os + import sys + import torchvision.transforms as transforms + from argparse import Namespace + from e4e.models.psp import pSp + from util import * + from huggingface_hub import hf_hub_download + from google.colab import files + torch.save({"g": generator.state_dict()}, "your-model-name.pt") + + files.download('your-model-name.pt') + + latent_dim = 512 + device="cuda" + model_path_s = hf_hub_download(repo_id="akhaliq/jojogan-stylegan2-ffhq-config-f", filename="stylegan2-ffhq-config-f.pt") + original_generator = Generator(1024, latent_dim, 8, 2).to(device) + ckpt = torch.load(model_path_s, map_location=lambda storage, loc: storage) + original_generator.load_state_dict(ckpt["g_ema"], strict=False) + mean_latent = original_generator.mean_latent(10000) + + generator = deepcopy(original_generator) + + ckpt = torch.load("/content/JoJoGAN/your-model-name.pt", map_location=lambda storage, loc: storage) + generator.load_state_dict(ckpt["g"], 
strict=False) + generator.eval() + + plt.rcParams['figure.dpi'] = 150 + + transform = transforms.Compose( + [ + transforms.Resize((1024, 1024)), + transforms.ToTensor(), + transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)), + ] + ) + + def inference(img): + img.save('out.jpg') + aligned_face = align_face('out.jpg') + + my_w = e4e_projection(aligned_face, "out.pt", device).unsqueeze(0) + with torch.no_grad(): + my_sample = generator(my_w, input_is_latent=True) + + npimage = my_sample[0].cpu().permute(1, 2, 0).detach().numpy() + imageio.imwrite('filename.jpeg', npimage) + return 'filename.jpeg' + ``` + +5. 构建 Gradio 演示 + + ```python + + import gradio as gr + + title = "JoJoGAN" + description = "JoJoGAN 的 Gradio 演示:一次性面部风格化。要使用它,只需上传您的图像,或单击示例之一加载它们。在下面的链接中阅读更多信息。" + + demo = gr.Interface( + inference, + gr.Image(type="pil"), + gr.Image(type=" 文件 "), + title=title, + description=description + ) + + demo.launch(share=True) + ``` + +6. 将 Gradio 集成到 W&B 仪表板 + + 最后一步——将 Gradio 演示与 W&B 仪表板集成,只需要一行额外的代码 : + + ```python + + demo.integrate(wandb=wandb) + ``` + + 调用集成之后,将创建一个演示,您可以将其集成到仪表板或报告中 + + 在 W&B 之外,使用 gradio-app 标记允许任何人直接将 Gradio 演示嵌入到其博客、网站、文档等中的 HF spaces 上 : + + ```html + <gradio-app space="akhaliq/JoJoGAN"> <gradio-app> + ``` + +7.(可选)在 Gradio 应用程序中嵌入 W&B 图 + + 也可以在 Gradio 应用程序中嵌入 W&B 图。为此,您可以创建一个 W&B 报告,并在一个 `gr.HTML` 块中将其嵌入到 Gradio 应用程序中。 + + 报告需要是公开的,您需要在 iFrame 中包装 URL,如下所示 : + The Report will need to be public and you will need to wrap the URL within an iFrame like this: + ```python + + import gradio as gr + + def wandb_report(url): + iframe = f'<iframe src={url} style="border:none;height:1024px;width:100%">' + return gr.HTML(iframe) + + with gr.Blocks() as demo: + report_url = 'https://wandb.ai/_scott/pytorch-sweeps-demo/reports/loss-22-10-07-16-00-17---VmlldzoyNzU2NzAx' + report = wandb_report(report_url) + + demo.launch(share=True) + ``` + +## 结论 + +希望您喜欢此嵌入 Gradio 演示到 W&B 报告的简短演示!感谢您一直阅读到最后。回顾一下 : + +- 仅需要一个单一参考图像即可对 JoJoGAN 进行微调,通常在 GPU 上需要约 1 
分钟。训练完成后,可以将样式应用于任何输入图像。在论文中阅读更多内容。 + +- W&B 可以通过添加几行代码来跟踪实验,您可以在单个集中的仪表板中可视化、排序和理解您的实验。 + +- Gradio 则在用户友好的界面中演示模型,可以在网络上任何地方共享。 + +## 如何在 Wandb 组织的 HF spaces 上 贡献 Gradio 演示 + +- 在 Hugging Face 上创建一个帐户[此处](https://huggingface.co/join)。 +- 在您的用户名下添加 Gradio 演示,请参阅[此教程](https://huggingface.co/course/chapter9/4?fw=pt) 以在 Hugging Face 上设置 Gradio 演示。 +- 申请加入 wandb 组织[此处](https://huggingface.co/wandb)。 +- 批准后,将模型从自己的用户名转移到 Wandb 组织中。 diff --git a/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/image-classification-in-pytorch.md b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/image-classification-in-pytorch.md new file mode 100644 index 0000000000000000000000000000000000000000..458885207abd021455dabaa5752319737669b94a --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/image-classification-in-pytorch.md @@ -0,0 +1,88 @@ +# PyTorch 图像分类 + +Related spaces: https://huggingface.co/spaces/abidlabs/pytorch-image-classifier, https://huggingface.co/spaces/pytorch/ResNet, https://huggingface.co/spaces/pytorch/ResNext, https://huggingface.co/spaces/pytorch/SqueezeNet +Tags: VISION, RESNET, PYTORCH + +## 介绍 + +图像分类是计算机视觉中的一个核心任务。构建更好的分类器以区分图片中存在的物体是当前研究的一个热点领域,因为它的应用范围从自动驾驶车辆到医学成像等领域都很广泛。 + +这样的模型非常适合 Gradio 的 _image_ 输入组件,因此在本教程中,我们将使用 Gradio 构建一个用于图像分类的 Web 演示。我们将能够在 Python 中构建整个 Web 应用程序,效果如下(试试其中一个示例!): + + + +让我们开始吧! 
+ +### 先决条件 + +确保您已经[安装](/getting_started)了 `gradio` Python 包。我们将使用一个预训练的图像分类模型,所以您还应该安装了 `torch`。 + +## 第一步 - 设置图像分类模型 + +首先,我们需要一个图像分类模型。在本教程中,我们将使用一个预训练的 Resnet-18 模型,因为它可以从[PyTorch Hub](https://pytorch.org/hub/pytorch_vision_resnet/)轻松下载。您可以使用其他预训练模型或训练自己的模型。 + +```python +import torch + +model = torch.hub.load('pytorch/vision:v0.6.0', 'resnet18', pretrained=True).eval() +``` + +由于我们将使用模型进行推断,所以我们调用了 `.eval()` 方法。 + +## 第二步 - 定义 `predict` 函数 + +接下来,我们需要定义一个函数,该函数接受*用户输入*,在本示例中是一张图片,并返回预测结果。预测结果应该以字典的形式返回,其中键是类别名称,值是置信度概率。我们将从这个[text 文件](https://git.io/JJkYN)中加载类别名称。 + +对于我们的预训练模型,它的代码如下: + +```python +import requests +from PIL import Image +from torchvision import transforms + +# 下载ImageNet的可读标签。 +response = requests.get("https://git.io/JJkYN") +labels = response.text.split("\n") + +def predict(inp): + inp = transforms.ToTensor()(inp).unsqueeze(0) + with torch.no_grad(): + prediction = torch.nn.functional.softmax(model(inp)[0], dim=0) + confidences = {labels[i]: float(prediction[i]) for i in range(1000)} + return confidences +``` + +让我们逐步来看一下这段代码。该函数接受一个参数: + +- `inp`:输入图片,类型为 `PIL` 图像 + +然后,该函数将图像转换为 PIL 图像,最终转换为 PyTorch 的 `tensor`,将其输入模型,并返回: + +- `confidences`:预测结果,以字典形式表示,其中键是类别标签,值是置信度概率 + +## 第三步 - 创建 Gradio 界面 + +现在我们已经设置好了预测函数,我们可以创建一个 Gradio 界面。 + +在本例中,输入组件是一个拖放图片的组件。为了创建这个输入组件,我们使用 `Image(type="pil")` 来创建该组件,并处理预处理操作将其转换为 `PIL` 图像。 + +输出组件将是一个 `Label`,它以良好的形式显示顶部标签。由于我们不想显示所有 1000 个类别标签,所以我们将其定制为只显示前 3 个标签,构造为 `Label(num_top_classes=3)`。 + +最后,我们添加了一个 `examples` 参数,允许我们预填一些预定义的示例到界面中。Gradio 的代码如下: + +```python +import gradio as gr + +gr.Interface(fn=predict, + inputs=gr.Image(type="pil"), + outputs=gr.Label(num_top_classes=3), + examples=["lion.jpg", "cheetah.jpg"]).launch() +``` + +这将产生以下界面,您可以在浏览器中直接尝试(试试上传自己的示例图片!): + + + +--- + +完成了!这就是构建图像分类器 Web 演示所需的所有代码。如果您想与他人共享,请在 `launch()` 接口时设置 `share=True`! 
diff --git a/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/image-classification-in-tensorflow.md b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/image-classification-in-tensorflow.md new file mode 100644 index 0000000000000000000000000000000000000000..5e05cdcb37155c378e7a6865bab36bcdcec1d1cf --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/image-classification-in-tensorflow.md @@ -0,0 +1,86 @@ +# TensorFlow 和 Keras 中的图像分类 + +相关空间:https://huggingface.co/spaces/abidlabs/keras-image-classifier +标签:VISION, MOBILENET, TENSORFLOW + +## 简介 + +图像分类是计算机视觉中的一项核心任务。构建更好的分类器来识别图像中的物体是一个研究的热点领域,因为它在交通控制系统到卫星成像等应用中都有广泛的应用。 + +这样的模型非常适合与 Gradio 的 _image_ 输入组件一起使用,因此在本教程中,我们将使用 Gradio 构建一个用于图像分类的 Web 演示。我们可以在 Python 中构建整个 Web 应用程序,它的界面将如下所示(试试其中一个例子!): + + + +让我们开始吧! + +### 先决条件 + +确保您已经[安装](/getting_started)了 `gradio` Python 包。我们将使用一个预训练的 Keras 图像分类模型,因此您还应该安装了 `tensorflow`。 + +## 第一步 —— 设置图像分类模型 + +首先,我们需要一个图像分类模型。在本教程中,我们将使用一个预训练的 Mobile Net 模型,因为它可以从[Keras](https://keras.io/api/applications/mobilenet/)轻松下载。您也可以使用其他预训练模型或训练自己的模型。 + +```python +import tensorflow as tf + +inception_net = tf.keras.applications.MobileNetV2() +``` + +此行代码将使用 Keras 库自动下载 MobileNet 模型和权重。 + +## 第二步 —— 定义 `predict` 函数 + +接下来,我们需要定义一个函数,该函数接收*用户输入*作为参数(在本例中为图像),并返回预测结果。预测结果应以字典形式返回,其中键是类名,值是置信概率。我们将从这个[text 文件](https://git.io/JJkYN)中加载类名。 + +对于我们的预训练模型,函数将如下所示: + +```python +import requests + +# 从ImageNet下载可读性标签。 +response = requests.get("https://git.io/JJkYN") +labels = response.text.split("\n") + +def classify_image(inp): + inp = inp.reshape((-1, 224, 224, 3)) + inp = tf.keras.applications.mobilenet_v2.preprocess_input(inp) + prediction = inception_net.predict(inp).flatten() + confidences = {labels[i]: float(prediction[i]) for i in range(1000)} + return confidences +``` + +让我们来详细了解一下。该函数接受一个参数: + +- `inp`:输入图像的 `numpy` 数组 + +然后,函数添加一个批次维度,通过模型进行处理,并返回: + +- `confidences`:预测结果,以字典形式表示,其中键是类标签,值是置信概率 + +## 第三步 —— 创建 
Gradio 界面 + +现在我们已经设置好了预测函数,我们可以围绕它创建一个 Gradio 界面。 + +在这种情况下,输入组件是一个拖放图像组件。要创建此输入组件,我们可以使用 `"gradio.inputs.Image"` 类,该类创建该组件并处理预处理以将其转换为 numpy 数组。我们将使用一个参数来实例化该类,该参数会自动将输入图像预处理为 224 像素 x224 像素的大小,这是 MobileNet 所期望的尺寸。 + +输出组件将是一个 `"label"`,它以美观的形式显示顶部标签。由于我们不想显示所有的 1,000 个类标签,所以我们将自定义它只显示前 3 个标签。 + +最后,我们还将添加一个 `examples` 参数,它允许我们使用一些预定义的示例预填充我们的接口。Gradio 的代码如下所示: + +```python +import gradio as gr + +gr.Interface(fn=classify_image, + inputs=gr.Image(shape=(224, 224)), + outputs=gr.Label(num_top_classes=3), + examples=["banana.jpg", "car.jpg"]).launch() +``` + +这将生成以下界面,您可以在浏览器中立即尝试(尝试上传您自己的示例!): + + + +--- + +完成!这就是构建图像分类器的 Web 演示所需的所有代码。如果您想与他人分享,请尝试在启动接口时设置 `share=True`! diff --git a/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/image-classification-with-vision-transformers.md b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/image-classification-with-vision-transformers.md new file mode 100644 index 0000000000000000000000000000000000000000..a1186da97bee03e9a7f6901d20085a93e0fb7121 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/04_integrating-other-frameworks/image-classification-with-vision-transformers.md @@ -0,0 +1,52 @@ +# Vision Transformers 图像分类 + +相关空间:https://huggingface.co/spaces/abidlabs/vision-transformer +标签:VISION, TRANSFORMERS, HUB + +## 简介 + +图像分类是计算机视觉中的重要任务。构建更好的分类器以确定图像中存在的对象是当前研究的热点领域,因为它在从人脸识别到制造质量控制等方面都有应用。 + +最先进的图像分类器基于 _transformers_ 架构,该架构最初在自然语言处理任务中很受欢迎。这种架构通常被称为 vision transformers (ViT)。这些模型非常适合与 Gradio 的*图像*输入组件一起使用,因此在本教程中,我们将构建一个使用 Gradio 进行图像分类的 Web 演示。我们只需用**一行 Python 代码**即可构建整个 Web 应用程序,其效果如下(试用一下示例之一!): + + + +让我们开始吧! 
+ +### 先决条件 + +确保您已经[安装](/getting_started)了 `gradio` Python 包。 + +## 步骤 1 - 选择 Vision 图像分类模型 + +首先,我们需要一个图像分类模型。在本教程中,我们将使用[Hugging Face Model Hub](https://huggingface.co/models?pipeline_tag=image-classification)上的一个模型。该 Hub 包含数千个模型,涵盖了多种不同的机器学习任务。 + +在左侧边栏中展开 Tasks 类别,并选择我们感兴趣的“Image Classification”作为我们的任务。然后,您将看到 Hub 上为图像分类设计的所有模型。 + +在撰写时,最受欢迎的模型是 `google/vit-base-patch16-224`,该模型在分辨率为 224x224 像素的 ImageNet 图像上进行了训练。我们将在演示中使用此模型。 + +## 步骤 2 - 使用 Gradio 加载 Vision Transformer 模型 + +当使用 Hugging Face Hub 上的模型时,我们无需为演示定义输入或输出组件。同样,我们不需要关心预处理或后处理的细节。所有这些都可以从模型标签中自动推断出来。 + +除了导入语句外,我们只需要一行代码即可加载并启动演示。 + +我们使用 `gr.Interface.load()` 方法,并传入包含 `huggingface/` 的模型路径,以指定它来自 Hugging Face Hub。 + +```python +import gradio as gr + +gr.Interface.load( + "huggingface/google/vit-base-patch16-224", + examples=["alligator.jpg", "laptop.jpg"]).launch() +``` + +请注意,我们添加了一个 `examples` 参数,允许我们使用一些预定义的示例预填充我们的界面。 + +这将生成以下接口,您可以直接在浏览器中尝试。当您输入图像时,它会自动进行预处理并发送到 Hugging Face Hub API,通过模型处理,并以人类可解释的预测结果返回。尝试上传您自己的图像! 
+ + + +--- + +完成!只需一行代码,您就建立了一个图像分类器的 Web 演示。如果您想与他人分享,请在 `launch()` 接口时设置 `share=True`。 diff --git a/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/01_connecting-to-a-database.md b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/01_connecting-to-a-database.md new file mode 100644 index 0000000000000000000000000000000000000000..2c3cc67d8dc7448e56092a5a6553ba259479de58 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/01_connecting-to-a-database.md @@ -0,0 +1,152 @@ +# 连接到数据库 + +相关空间:https://huggingface.co/spaces/gradio/chicago-bike-share-dashboard +标签:TABULAR, PLOTS + +## 介绍 + +本指南介绍如何使用 Gradio 连接您的应用程序到数据库。我们将会 +连接到在 AWS 上托管的 PostgreSQL 数据库,但 Gradio 对于您连接的数据库类型和托管位置没有任何限制。因此,只要您能编写 Python 代码来连接 +您的数据,您就可以使用 Gradio 在 Web 界面中显示它 💪 + +## 概述 + +我们将分析来自芝加哥的自行车共享数据。数据托管在 kaggle [这里](https://www.kaggle.com/datasets/evangower/cyclistic-bike-share?select=202203-divvy-tripdata.csv)。 +我们的目标是创建一个仪表盘,让我们的业务利益相关者能够回答以下问题: + +1. 电动自行车是否比普通自行车更受欢迎? +2. 哪些出发自行车站点最受欢迎? 
+ +在本指南结束时,我们将拥有一个如下所示的功能齐全的应用程序: + + + +## 步骤 1 - 创建数据库 + +我们将在 Amazon 的 RDS 服务上托管我们的数据。如果还没有 AWS 账号,请创建一个 +并在免费层级上创建一个 PostgreSQL 数据库。 + +**重要提示**:如果您计划在 HuggingFace Spaces 上托管此演示,请确保数据库在 **8080** 端口上。Spaces +将阻止除端口 80、443 或 8080 之外的所有外部连接,如此[处所示](https://huggingface.co/docs/hub/spaces-overview#networking)。 +RDS 不允许您在 80 或 443 端口上创建 postgreSQL 实例。 + +创建完数据库后,从 Kaggle 下载数据集并将其上传到数据库中。 +为了演示的目的,我们只会上传 2022 年 3 月的数据。 + +## 步骤 2.a - 编写 ETL 代码 + +我们将查询数据库,按自行车类型(电动、标准或有码)进行分组,并获取总骑行次数。 +我们还将查询每个站点的出发骑行次数,并获取前 5 个。 + +然后,我们将使用 matplotlib 将查询结果可视化。 + +我们将使用 pandas 的[read_sql](https://pandas.pydata.org/docs/reference/api/pandas.read_sql.html) +方法来连接数据库。这需要安装 `psycopg2` 库。 + +为了连接到数据库,我们将指定数据库的用户名、密码和主机作为环境变量。 +这样可以通过避免将敏感信息以明文形式存储在应用程序文件中,使我们的应用程序更安全。 + +```python +import os +import pandas as pd +import matplotlib.pyplot as plt + +DB_USER = os.getenv("DB_USER") +DB_PASSWORD = os.getenv("DB_PASSWORD") +DB_HOST = os.getenv("DB_HOST") +PORT = 8080 +DB_NAME = "bikeshare" + +connection_string = f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}?port={PORT}&dbname={DB_NAME}" + +def get_count_ride_type(): + df = pd.read_sql( + """ + SELECT COUNT(ride_id) as n, rideable_type + FROM rides + GROUP BY rideable_type + ORDER BY n DESC + """, + con=connection_string + ) + fig_m, ax = plt.subplots() + ax.bar(x=df['rideable_type'], height=df['n']) + ax.set_title("Number of rides by bycycle type") + ax.set_ylabel("Number of Rides") + ax.set_xlabel("Bicycle Type") + return fig_m + + +def get_most_popular_stations(): + + df = pd.read_sql( + """ + SELECT COUNT(ride_id) as n, MAX(start_station_name) as station + FROM RIDES + WHERE start_station_name is NOT NULL + GROUP BY start_station_id + ORDER BY n DESC + LIMIT 5 + """, + con=connection_string + ) + fig_m, ax = plt.subplots() + ax.bar(x=df['station'], height=df['n']) + ax.set_title("Most popular stations") + ax.set_ylabel("Number of Rides") + ax.set_xlabel("Station Name") + ax.set_xticklabels( + df['station'], rotation=45, ha="right", 
rotation_mode="anchor" + ) + ax.tick_params(axis="x", labelsize=8) + fig_m.tight_layout() + return fig_m +``` + +如果您在本地运行我们的脚本,可以像下面这样将凭据作为环境变量传递: + +```bash +DB_USER='username' DB_PASSWORD='password' DB_HOST='host' python app.py +``` + +## 步骤 2.c - 编写您的 gradio 应用程序 + +我们将使用两个单独的 `gr.Plot` 组件将我们的 matplotlib 图表并排显示在一起,使用 `gr.Row()`。 +因为我们已经在 `demo.load()` 事件触发器中封装了获取数据的函数, +我们的演示将在每次网页加载时从数据库**动态**获取最新数据。🪄 + +```python +import gradio as gr + +with gr.Blocks() as demo: + with gr.Row(): + bike_type = gr.Plot() + station = gr.Plot() + + demo.load(get_count_ride_type, inputs=None, outputs=bike_type) + demo.load(get_most_popular_stations, inputs=None, outputs=station) + +demo.launch() +``` + +## 步骤 3 - 部署 + +如果您运行上述代码,您的应用程序将在本地运行。 +您甚至可以通过将 `share=True` 参数传递给 `launch` 来获得一个临时共享链接。 + +但是如果您想要一个永久的部署解决方案呢? +让我们将我们的 Gradio 应用程序部署到免费的 HuggingFace Spaces 平台上。 + +如果您之前没有使用过 Spaces,请按照之前的指南[这里](/using_hugging_face_integrations)进行操作。 +您将需要将 `DB_USER`、`DB_PASSWORD` 和 `DB_HOST` 变量添加为 "Repo Secrets"。您可以在 " 设置 " 选项卡中进行此操作。 + +![secrets](/assets/guides/secrets.png) + +## 结论 + +恭喜你!您知道如何将您的 Gradio 应用程序连接到云端托管的数据库!☁️ + +我们的仪表板现在正在[Spaces](https://huggingface.co/spaces/gradio/chicago-bike-share-dashboard)上运行。 +完整代码在[这里](https://huggingface.co/spaces/gradio/chicago-bike-share-dashboard/blob/main/app.py) + +正如您所见,Gradio 使您可以连接到您的数据并以您想要的方式显示!🔥 diff --git a/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/creating-a-dashboard-from-bigquery-data.md b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/creating-a-dashboard-from-bigquery-data.md new file mode 100644 index 0000000000000000000000000000000000000000..0353964e632082f974c32916b8cf870ba3a5a86a --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/creating-a-dashboard-from-bigquery-data.md @@ -0,0 +1,122 @@ +# 从 BigQuery 数据创建实时仪表盘 + +Tags: 表格 , 仪表盘 , 绘图 + +[Google BigQuery](https://cloud.google.com/bigquery) 
是一个基于云的用于处理大规模数据集的服务。它是一个无服务器且高度可扩展的数据仓库解决方案,使用户能够使用类似 SQL 的查询分析数据。 + +在本教程中,我们将向您展示如何使用 `gradio` 在 Python 中查询 BigQuery 数据集并在实时仪表盘中显示数据。仪表板将如下所示: + + + +在本指南中,我们将介绍以下步骤: + +1. 设置 BigQuery 凭据 +2. 使用 BigQuery 客户端 +3. 构建实时仪表盘(仅需 _7 行 Python 代码_) + +我们将使用[纽约时报的 COVID 数据集](https://www.nytimes.com/interactive/2021/us/covid-cases.html),该数据集作为一个公共数据集可在 BigQuery 上使用。数据集名为 `covid19_nyt.us_counties`,其中包含有关美国各县 COVID 确诊病例和死亡人数的最新信息。 + +**先决条件**:本指南使用 [Gradio Blocks](../quickstart/#blocks-more-flexibility-and-control),因此请确保您熟悉 Blocks 类。 + +## 设置 BigQuery 凭据 + +要使用 Gradio 和 BigQuery,您需要获取您的 BigQuery 凭据,并将其与 [BigQuery Python 客户端](https://pypi.org/project/google-cloud-bigquery/) 一起使用。如果您已经拥有 BigQuery 凭据(作为 `.json` 文件),则可以跳过此部分。否则,您可以在几分钟内免费完成此操作。 + +1. 首先,登录到您的 Google Cloud 帐户,并转到 Google Cloud 控制台 (https://console.cloud.google.com/) + +2. 在 Cloud 控制台中,单击左上角的汉堡菜单,然后从菜单中选择“API 与服务”。如果您没有现有项目,则需要创建一个项目。 + +3. 然后,单击“+ 启用的 API 与服务”按钮,该按钮允许您为项目启用特定服务。搜索“BigQuery API”,单击它,然后单击“启用”按钮。如果您看到“管理”按钮,则表示 BigQuery 已启用,您已准备就绪。 + +4. 在“API 与服务”菜单中,单击“凭据”选项卡,然后单击“创建凭据”按钮。 + +5. 在“创建凭据”对话框中,选择“服务帐号密钥”作为要创建的凭据类型,并为其命名。还可以通过为其授予角色(例如“BigQuery 用户”)为服务帐号授予权限,从而允许您运行查询。 + +6. 
在选择服务帐号后,选择“JSON”密钥类型,然后单击“创建”按钮。这将下载包含您凭据的 JSON 密钥文件到您的计算机。它的外观类似于以下内容: + +```json +{ + "type": "service_account", + "project_id": "your project", + "private_key_id": "your private key id", + "private_key": "private key", + "client_email": "email", + "client_id": "client id", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://accounts.google.com/o/oauth2/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/email_id" +} +``` + +## 使用 BigQuery 客户端 + +获得凭据后,您需要使用 BigQuery Python 客户端使用您的凭据进行身份验证。为此,您需要在终端中运行以下命令安装 BigQuery Python 客户端: + +```bash +pip install google-cloud-bigquery[pandas] +``` + +您会注意到我们已安装了 pandas 插件,这对于将 BigQuery 数据集处理为 pandas 数据帧将非常有用。安装了客户端之后,您可以通过运行以下代码使用您的凭据进行身份验证: + +```py +from google.cloud import bigquery + +client = bigquery.Client.from_service_account_json("path/to/key.json") +``` + +完成凭据身份验证后,您现在可以使用 BigQuery Python 客户端与您的 BigQuery 数据集进行交互。 + +以下是一个示例函数,该函数在 BigQuery 中查询 `covid19_nyt.us_counties` 数据集,以显示截至当前日期的确诊人数最多的前 20 个县: + +```py +import numpy as np + +QUERY = ( + 'SELECT * FROM `bigquery-public-data.covid19_nyt.us_counties` ' + 'ORDER BY date DESC,confirmed_cases DESC ' + 'LIMIT 20') + +def run_query(): + query_job = client.query(QUERY) + query_result = query_job.result() + df = query_result.to_dataframe() + # Select a subset of columns + df = df[["confirmed_cases", "deaths", "county", "state_name"]] + # Convert numeric columns to standard numpy types + df = df.astype({"deaths": np.int64, "confirmed_cases": np.int64}) + return df +``` + +## 构建实时仪表盘 + +一旦您有了查询数据的函数,您可以使用 Gradio 库的 `gr.DataFrame` 组件以表格形式显示结果。这是一种检查数据并确保查询正确的有用方式。 + +以下是如何使用 `gr.DataFrame` 组件显示结果的示例。通过将 `run_query` 函数传递给 `gr.DataFrame`,我们指示 Gradio 在页面加载时立即运行该函数并显示结果。此外,您还可以传递关键字 `every`,以告知仪表板每小时刷新一次(60\*60 秒)。 + +```py +import gradio as gr + +with gr.Blocks() as demo: + gr.DataFrame(run_query, every=60*60) + 
+demo.queue().launch() # Run the demo using queuing +``` + +也许您想在我们的仪表盘中添加一个可视化效果。您可以使用 `gr.ScatterPlot()` 组件将数据可视化为散点图。这可以让您查看数据中不同变量(例如病例数和死亡数)之间的关系,并可用于探索数据和获取见解。同样,我们可以实时完成这一操作 +通过传递 `every` 参数。 + +以下是一个完整示例,展示了如何在显示数据时使用 `gr.ScatterPlot` 来进行可视化。 + +```py +import gradio as gr + +with gr.Blocks() as demo: + gr.Markdown("# 💉 Covid Dashboard (Updated Hourly)") + with gr.Row(): + gr.DataFrame(run_query, every=60*60) + gr.ScatterPlot(run_query, every=60*60, x="confirmed_cases", + y="deaths", tooltip="county", width=500, height=500) + +demo.queue().launch() # Run the demo with queuing enabled +``` diff --git a/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/creating-a-dashboard-from-supabase-data.md b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/creating-a-dashboard-from-supabase-data.md new file mode 100644 index 0000000000000000000000000000000000000000..b84d3998148d40c1bd9dd849cb3c0549d76530fe --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/creating-a-dashboard-from-supabase-data.md @@ -0,0 +1,123 @@ +# 从 Supabase 数据创建仪表盘 + +Tags: TABULAR, DASHBOARD, PLOTS + +[Supabase](https://supabase.com/) 是一个基于云的开源后端,提供了 PostgreSQL 数据库、身份验证和其他有用的功能,用于构建 Web 和移动应用程序。在本教程中,您将学习如何从 Supabase 读取数据,并在 Gradio 仪表盘上以**实时**方式绘制数据。 + +**先决条件 :** 要开始,您需要一个免费的 Supabase 账户,您可以在此处注册:[https://app.supabase.com/](https://app.supabase.com/) + +在这个端到端指南中,您将学习如何: + +- 在 Supabase 中创建表 +- 使用 Supabase Python 客户端向 Supabase 写入数据 +- 使用 Gradio 在实时仪表盘中可视化数据 + +如果您已经在 Supabase 上有数据想要在仪表盘中可视化,您可以跳过前两个部分,直接到[可视化数据](#visualize-the-data-in-a-real-time-gradio-dashboard)! + +## 在 Supabase 中创建表 + +首先,我们需要一些要可视化的数据。根据这个[出色的指南](https://supabase.com/blog/loading-data-supabase-python),我们将创建一些虚假的商务数据,并将其放入 Supabase 中。 + +1\. 在 Supabase 中创建一个新项目。一旦您登录,点击 "New Project" 按钮 + +2\. 给您的项目命名并设置数据库密码。您还可以选择定价计划(对于我们来说,免费计划已足够!) + +3\. 在数据库启动时(可能需要多达 2 分钟),您将看到您的 API 密钥。 + +4\. 
在左侧窗格中单击 "Table Editor"(表图标)以创建一个新表。我们将创建一个名为 `Product` 的单表,具有以下模式: + +
+ + + + + +
product_idint8
inventory_countint8
pricefloat8
product_namevarchar
+
+ +5\. 点击保存以保存表结构。 + +我们的表已经准备好了! + +## 将数据写入 Supabase + +下一步是向 Supabase 数据集中写入数据。我们将使用 Supabase Python 库来完成这个任务。 + +6\. 通过在终端中运行以下命令来安装 `supabase` 库: + +```bash +pip install supabase +``` + +7\. 获取项目 URL 和 API 密钥。点击左侧窗格上的设置(齿轮图标),然后点击 'API'。URL 列在项目 URL 框中,API 密钥列在项目 API 密钥(带有 `service_role`、`secret` 标签)中 + +8\. 现在,运行以下 Python 脚本将一些虚假数据写入表中(注意您需要在步骤 7 中放入 `SUPABASE_URL` 和 `SUPABASE_SECRET_KEY` 的值): + +```python +import supabase + +# 初始化Supabase客户端 +client = supabase.create_client('SUPABASE_URL', 'SUPABASE_SECRET_KEY') + +# 定义要写入的数据 +import random + +main_list = [] +for i in range(10): + value = {'product_id': i, + 'product_name': f"Item {i}", + 'inventory_count': random.randint(1, 100), + 'price': random.random()*100 + } + main_list.append(value) + +# 将数据写入表中 +data = client.table('Product').insert(main_list).execute() +``` + +返回 Supabase 仪表板并刷新页面,您将看到 10 行数据填充到 `Product` 表中! + +## 在实时 Gradio 仪表盘中可视化数据 + +最后,我们将使用相同的 `supabase` Python 库从 Supabase 数据集中读取数据,并使用 `gradio` 创建一个实时仪表盘。 + +注意:我们在本节中重复了某些步骤(比如创建 Supabase 客户端),以防您没有完成之前的部分。如第 7 步所述,您将需要数据库的项目 URL 和 API 密钥。 + +9\. 编写一个函数,从 `Product` 表加载数据并将其作为 pandas DataFrame 返回: + +import supabase + +```python +import supabase +import pandas as pd + +client = supabase.create_client('SUPABASE_URL', 'SUPABASE_SECRET_KEY') + +def read_data(): + response = client.table('Product').select("*").execute() + df = pd.DataFrame(response.data) + return df +``` + +10\. 
使用两个条形图创建一个小的 Gradio 仪表盘,每分钟绘制所有项目的价格和库存量,并实时更新: + +```python +import gradio as gr + +with gr.Blocks() as dashboard: + with gr.Row(): + gr.BarPlot(read_data, x="product_id", y="price", title="价格", every=60) + gr.BarPlot(read_data, x="product_id", y="inventory_count", title="库存", every=60) + +dashboard.queue().launch() +``` + +请注意,通过将函数传递给 `gr.BarPlot()`,我们可以在网络应用加载时查询数据库(然后每 60 秒查询一次,因为有 `every` 参数)。您的最终仪表盘应如下所示: + + + +## 结论 + +就是这样!在本教程中,您学习了如何将数据写入 Supabase 数据集,然后读取该数据并将结果绘制为条形图。如果您更新 Supabase 数据库中的数据,您会注意到 Gradio 仪表盘将在一分钟内更新。 + +尝试在此示例中添加更多绘图和可视化(或使用不同的数据集),以构建一个更复杂的仪表盘! diff --git a/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/creating-a-realtime-dashboard-from-google-sheets.md b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/creating-a-realtime-dashboard-from-google-sheets.md new file mode 100644 index 0000000000000000000000000000000000000000..b26a36abe3b9b76fc35892da9c92c738968fe407 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/creating-a-realtime-dashboard-from-google-sheets.md @@ -0,0 +1,121 @@ +# 从 Google Sheets 创建实时仪表盘 + +Tags: TABULAR, DASHBOARD, PLOTS +[Google Sheets](https://www.google.com/sheets/about/) 是一种以电子表格形式存储表格数据的简便方法。借助 Gradio 和 pandas,可以轻松从公共或私有 Google Sheets 读取数据,然后显示数据或绘制数据。在本博文中,我们将构建一个小型 _real-time_ 仪表盘,该仪表盘在 Google Sheets 中的数据更新时进行更新。 +构建仪表盘本身只需要使用 Gradio 的 9 行 Python 代码,我们的最终仪表盘如下所示: + + +**先决条件**:本指南使用[Gradio Blocks](../quickstart/#blocks-more-flexibility-and-control),因此请确保您熟悉 Blocks 类。 +具体步骤略有不同,具体取决于您是使用公开访问还是私有 Google Sheet。我们将分别介绍这两种情况,所以让我们开始吧! + +## Public Google Sheets + +由于[`pandas` 库](https://pandas.pydata.org/)的存在,从公共 Google Sheet 构建仪表盘非常简单: + +1. 获取要使用的 Google Sheets 的网址。为此,只需进入 Google Sheets,单击右上角的“共享”按钮,然后单击“获取可共享链接”按钮。这将给您一个类似于以下示例的网址: + +```html +https://docs.google.com/spreadsheets/d/1UoKzzRzOCt-FXLLqDKLbryEKEgllGAQUEJ5qtmmQwpU/edit#gid=0 +``` + +2. 
现在,修改此网址并使用它从 Google Sheets 读取数据到 Pandas DataFrame 中。 (在下面的代码中,用您的公开 Google Sheet 的网址替换 `URL` 变量): + +```python +import pandas as pd +URL = "https://docs.google.com/spreadsheets/d/1UoKzzRzOCt-FXLLqDKLbryEKEgllGAQUEJ5qtmmQwpU/edit#gid=0"csv_url = URL.replace('/edit#gid=', '/export?format=csv&gid=') +def get_data(): + return pd.read_csv(csv_url) +``` + +3. 数据查询是一个函数,这意味着可以使用 `gr.DataFrame` 组件实时显示或使用 `gr.LinePlot` 组件实时绘制数据(当然,根据数据的不同,可能需要不同的绘图方法)。只需将函数传递给相应的组件,并根据组件刷新的频率(以秒为单位)设置 `every` 参数。以下是 Gradio 代码: + +```python +import gradio as gr + +with gr.Blocks() as demo: + gr.Markdown("# 📈 Real-Time Line Plot") + with gr.Row(): + with gr.Column(): + gr.DataFrame(get_data, every=5) + with gr.Column(): + gr.LinePlot(get_data, every=5, x="Date", y="Sales", y_title="Sales ($ millions)", overlay_point=True, width=500, height=500) + +demo.queue().launch() # Run the demo with queuing enabled +``` + +到此为止!您现在拥有一个仪表盘,每 5 秒刷新一次,从 Google Sheets 中获取数据。 + +## 私有 Google Sheets + +对于私有 Google Sheets,流程需要更多的工作量,但并不多!关键区别在于,现在您必须经过身份验证,以授权访问私有 Google Sheets。 + +### 身份验证 + +要进行身份验证,需从 Google Cloud 获取凭据。以下是[如何设置 Google Cloud 凭据](https://developers.google.com/workspace/guides/create-credentials): + +1. 首先,登录您的 Google Cloud 帐户并转到 Google Cloud 控制台(https://console.cloud.google.com/) +2. 在 Cloud 控制台中,单击左上角的汉堡菜单,然后从菜单中选择“API 和服务”。如果您没有现有项目,则需要创建一个。 +3. 然后,点击“+ 启用的 API 和服务”按钮,允许您为项目启用特定的服务。搜索“Google Sheets API”,点击它,然后单击“启用”按钮。如果看到“管理”按钮,则表示 Google Sheets 已启用,并且您已准备就绪。 +4. 在 API 和服务菜单中,点击“凭据”选项卡,然后点击“创建凭据”按钮。 +5. 在“创建凭据”对话框中,选择“服务帐号密钥”作为要创建的凭据类型,并为其命名。**记下服务帐号的电子邮件地址** +6. 
在选择服务帐号之后,选择“JSON”密钥类型,然后点击“创建”按钮。这将下载包含您凭据的 JSON 密钥文件到您的计算机。文件类似于以下示例: + +```json +{ + "type": "service_account", + "project_id": "your project", + "private_key_id": "your private key id", + "private_key": "private key", + "client_email": "email", + "client_id": "client id", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://accounts.google.com/o/oauth2/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/email_id" +} +``` + +### 查询 + +在获得凭据的 `.json` 文件后,可以按照以下步骤查询您的 Google Sheet: + +1. 单击 Google Sheet 右上角的“共享”按钮。使用身份验证子部分第 5 步的服务的电子邮件地址共享 Google Sheets(此步骤很重要!)。然后单击“获取可共享链接”按钮。这将给您一个类似于以下示例的网址: + +```html +https://docs.google.com/spreadsheets/d/1UoKzzRzOCt-FXLLqDKLbryEKEgllGAQUEJ5qtmmQwpU/edit#gid=0 +``` + +2. 安装 [`gspread` 库](https://docs.gspread.org/en/v5.7.0/),通过在终端运行以下命令使 Python 中使用 [Google Sheets API](https://developers.google.com/sheets/api/guides/concepts) 更加简单:`pip install gspread` +3. 编写一个函数来从 Google Sheet 中加载数据,如下所示(用您的私有 Google Sheet 的 URL 替换 `URL` 变量): + +```python +import gspreadimport pandas as pd +# 与 Google 进行身份验证并获取表格URL = 'https://docs.google.com/spreadsheets/d/1_91Vps76SKOdDQ8cFxZQdgjTJiz23375sAT7vPvaj4k/edit#gid=0' +gc = gspread.service_account("path/to/key.json")sh = gc.open_by_url(URL)worksheet = sh.sheet1 +def get_data(): + values = worksheet.get_all_values() + df = pd.DataFrame(values[1:], columns=values[0]) + return df +``` + +4\. 
数据查询是一个函数,这意味着可以使用 `gr.DataFrame` 组件实时显示数据,或使用 `gr.LinePlot` 组件实时绘制数据(当然,根据数据的不同,可能需要使用不同的图表)。要实现这一点,只需将函数传递给相应的组件,并根据需要设置 `every` 参数来确定组件刷新的频率(以秒为单位)。以下是 Gradio 代码: + +```python +import gradio as gr + +with gr.Blocks() as demo: + gr.Markdown("# 📈 实时折线图") + with gr.Row(): + with gr.Column(): + gr.DataFrame(get_data, every=5) + with gr.Column(): + gr.LinePlot(get_data, every=5, x="日期", y="销售额", y_title="销售额(百万美元)", overlay_point=True, width=500, height=500) + +demo.queue().launch() # 启动带有排队功能的演示 +``` + +现在你有一个每 5 秒刷新一次的仪表盘,可以从你的 Google 表格中获取数据。 + +## 结论 + +就是这样!只需几行代码,你就可以使用 `gradio` 和其他库从公共或私有的 Google 表格中读取数据,然后在实时仪表盘中显示和绘制数据。 diff --git a/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/plot-component-for-maps.md b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/plot-component-for-maps.md new file mode 100644 index 0000000000000000000000000000000000000000..bd48d87613b21a5786c6ba4163e4d30bf299441e --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/plot-component-for-maps.md @@ -0,0 +1,111 @@ +# 如何使用地图组件绘制图表 + +Related spaces: +Tags: PLOTS, MAPS + +## 简介 + +本指南介绍如何使用 Gradio 的 `Plot` 组件在地图上绘制地理数据。Gradio 的 `Plot` 组件可以与 Matplotlib、Bokeh 和 Plotly 一起使用。在本指南中,我们将使用 Plotly 进行操作。Plotly 可以让开发人员轻松创建各种地图来展示他们的地理数据。点击[这里](https://plotly.com/python/maps/)查看一些示例。 + +## 概述 + +我们将使用纽约市的 Airbnb 数据集,该数据集托管在 kaggle 上,点击[这里](https://www.kaggle.com/datasets/dgomonov/new-york-city-airbnb-open-data)。我已经将其上传到 Hugging Face Hub 作为一个数据集,方便使用和下载,点击[这里](https://huggingface.co/datasets/gradio/NYC-Airbnb-Open-Data)。使用这些数据,我们将在地图上绘制 Airbnb 的位置,并允许基于价格和位置进行筛选。下面是我们将要构建的演示。 ⚡️ + +$demo_map_airbnb + +## 步骤 1-加载 CSV 数据 💾 + +让我们首先从 Hugging Face Hub 加载纽约市的 Airbnb 数据。 + +```python +from datasets import load_dataset + +dataset = load_dataset("gradio/NYC-Airbnb-Open-Data", split="train") +df = dataset.to_pandas() + +def filter_map(min_price, max_price, boroughs): + new_df = df[(df['neighbourhood_group'].isin(boroughs)) & + 
(df['price'] > min_price) & (df['price'] < max_price)] + names = new_df["name"].tolist() + prices = new_df["price"].tolist() + text_list = [(names[i], prices[i]) for i in range(0, len(names))] +``` + +在上面的代码中,我们先将 CSV 数据加载到一个 pandas dataframe 中。让我们首先定义一个函数,这将作为 gradio 应用程序的预测函数。该函数将接受最低价格、最高价格范围和筛选结果地区的列表作为参数。我们可以使用传入的值 (`min_price`、`max_price` 和地区列表) 来筛选数据框并创建 `new_df`。接下来,我们将创建包含每个 Airbnb 的名称和价格的 `text_list`,以便在地图上使用作为标签。 + +## 步骤 2-地图图表 🌐 + +Plotly 使得处理地图变得很容易。让我们看一下下面的代码,了解如何创建地图图表。 + +```python +import plotly.graph_objects as go + +fig = go.Figure(go.Scattermapbox( + customdata=text_list, + lat=new_df['latitude'].tolist(), + lon=new_df['longitude'].tolist(), + mode='markers', + marker=go.scattermapbox.Marker( + size=6 + ), + hoverinfo="text", + hovertemplate='Name: %{customdata[0]}
Price: $%{customdata[1]}' + )) + +fig.update_layout( + mapbox_style="open-street-map", + hovermode='closest', + mapbox=dict( + bearing=0, + center=go.layout.mapbox.Center( + lat=40.67, + lon=-73.90 + ), + pitch=0, + zoom=9 + ), +) +``` + +上面的代码中,我们通过传入经纬度列表来创建一个散点图。我们还传入了名称和价格的自定义数据,以便在鼠标悬停在每个标记上时显示额外的信息。接下来,我们使用 `update_layout` 来指定其他地图设置,例如缩放和居中。 + +有关使用 Mapbox 和 Plotly 创建散点图的更多信息,请点击[这里](https://plotly.com/python/scattermapbox/)。 + +## 步骤 3-Gradio 应用程序 ⚡️ + +我们将使用两个 `gr.Number` 组件和一个 `gr.CheckboxGroup` 组件,允许用户指定价格范围和地区位置。然后,我们将使用 `gr.Plot` 组件作为我们之前创建的 Plotly + Mapbox 地图的输出。 + +```python +with gr.Blocks() as demo: + with gr.Column(): + with gr.Row(): + min_price = gr.Number(value=250, label="Minimum Price") + max_price = gr.Number(value=1000, label="Maximum Price") + boroughs = gr.CheckboxGroup(choices=["Queens", "Brooklyn", "Manhattan", "Bronx", "Staten Island"], value=["Queens", "Brooklyn"], label="Select Boroughs:") + btn = gr.Button(value="Update Filter") + map = gr.Plot() + demo.load(filter_map, [min_price, max_price, boroughs], map) + btn.click(filter_map, [min_price, max_price, boroughs], map) +``` + +我们使用 `gr.Column` 和 `gr.Row` 布局这些组件,并为演示加载时和点击 " 更新筛选 " 按钮时添加了事件触发器,以触发地图更新新的筛选条件。 + +以下是完整演示代码: + +$code_map_airbnb + +## 步骤 4-部署 Deployment 🤗 + +如果你运行上面的代码,你的应用程序将在本地运行。 +如果要获取临时共享链接,可以将 `share=True` 参数传递给 `launch`。 + +但如果你想要一个永久的部署解决方案呢? 
+让我们将我们的 Gradio 应用程序部署到免费的 HuggingFace Spaces 平台。 + +如果你以前没有使用过 Spaces,请按照之前的指南[这里](/using_hugging_face_integrations)。 + +## 结论 🎉 + +你已经完成了!这是构建地图演示所需的所有代码。 + +链接到演示:[地图演示](https://huggingface.co/spaces/gradio/map_airbnb)和[完整代码](https://huggingface.co/spaces/gradio/map_airbnb/blob/main/run.py)(在 Hugging Face Spaces) diff --git a/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/using-gradio-for-tabular-workflows.md b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/using-gradio-for-tabular-workflows.md new file mode 100644 index 0000000000000000000000000000000000000000..4ca1b3c688d753b653c488e6cbc8099780a5cb0d --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/05_tabular-data-science-and-plots/using-gradio-for-tabular-workflows.md @@ -0,0 +1,103 @@ +## 使用 Gradio 进行表格数据科学工作流 + +Related spaces: https://huggingface.co/spaces/scikit-learn/gradio-skops-integration,https://huggingface.co/spaces/scikit-learn/tabular-playground,https://huggingface.co/spaces/merve/gradio-analysis-dashboard + +## 介绍 + +表格数据科学是机器学习中应用最广泛的领域,涉及的问题从客户分割到流失预测不等。在表格数据科学工作流的各个阶段中,将工作内容传达给利益相关者或客户可能很麻烦,这会阻碍数据科学家专注于重要事项,如数据分析和模型构建。数据科学家可能会花费数小时构建一个接受 DataFrame 并返回图表、预测或数据集中的聚类图的仪表板。在本指南中,我们将介绍如何使用 `gradio` 改进您的数据科学工作流程。我们还将讨论如何使用 `gradio` 和[skops](https://skops.readthedocs.io/en/stable/)一行代码即可构建界面! + +### 先决条件 + +确保您已经[安装](/getting_started)了 `gradio` Python 软件包。 + +## 让我们创建一个简单的界面! 
+ +我们将看一下如何创建一个简单的界面,该界面根据产品信息预测故障。 + +```python +import gradio as gr +import pandas as pd +import joblib +import datasets + + +inputs = [gr.Dataframe(row_count = (2, "dynamic"), col_count=(4,"dynamic"), label="Input Data", interactive=1)] + +outputs = [gr.Dataframe(row_count = (2, "dynamic"), col_count=(1, "fixed"), label="Predictions", headers=["Failures"])] + +model = joblib.load("model.pkl") + +# we will give our dataframe as example +df = datasets.load_dataset("merve/supersoaker-failures") +df = df["train"].to_pandas() + +def infer(input_dataframe): + return pd.DataFrame(model.predict(input_dataframe)) + +gr.Interface(fn = infer, inputs = inputs, outputs = outputs, examples = [[df.head(2)]]).launch() +``` + +让我们来解析上述代码。 + +- `fn`:推理函数,接受输入数据帧并返回预测结果。 +- `inputs`:我们使用 `Dataframe` 组件作为输入。我们将输入定义为具有 2 行 4 列的数据帧,最初的数据帧将呈现出上述形状的空数据帧。当将 `row_count` 设置为 `dynamic` 时,不必依赖于正在输入的数据集来预定义组件。 +- `outputs`:用于存储输出的数据帧组件。该界面可以接受单个或多个样本进行推断,并在一列中为每个样本返回 0 或 1,因此我们将 `row_count` 设置为 2,`col_count` 设置为 1。`headers` 是由数据帧的列名组成的列表。 +- `examples`:您可以通过拖放 CSV 文件或通过示例传递 pandas DataFrame,界面会自动获取其标题。 + +现在我们将为简化版数据可视化仪表板创建一个示例。您可以在相关空间中找到更全面的版本。 + + + +```python +import gradio as gr +import pandas as pd +import datasets +import seaborn as sns +import matplotlib.pyplot as plt + +df = datasets.load_dataset("merve/supersoaker-failures") +df = df["train"].to_pandas() +df.dropna(axis=0, inplace=True) + +def plot(df): + plt.scatter(df.measurement_13, df.measurement_15, c = df.loading,alpha=0.5) + plt.savefig("scatter.png") + df['failure'].value_counts().plot(kind='bar') + plt.savefig("bar.png") + sns.heatmap(df.select_dtypes(include="number").corr()) + plt.savefig("corr.png") + plots = ["corr.png","scatter.png", "bar.png"] + return plots + +inputs = [gr.Dataframe(label="Supersoaker Production Data")] +outputs = [gr.Gallery(label="Profiling Dashboard", columns=(1,3))] + +gr.Interface(plot, inputs=inputs, outputs=outputs, examples=[df.head(100)], title="Supersoaker Failures Analysis 
Dashboard").launch() +``` + + + +我们将使用与训练模型相同的数据集,但这次我们将创建一个可视化仪表板以展示它。 + +- `fn`:根据数据创建图表的函数。 +- `inputs`:我们使用了与上述相同的 `Dataframe` 组件。 +- `outputs`:我们使用 `Gallery` 组件来存放我们的可视化结果。 +- `examples`:我们将数据集本身作为示例。 + +## 使用 skops 一行代码轻松加载表格数据界面 + +`skops` 是一个构建在 `huggingface_hub` 和 `sklearn` 之上的库。通过最新的 `gradio` 集成,您可以使用一行代码构建表格数据界面! + +```python +import gradio as gr + +# 标题和描述是可选的 +title = "Supersoaker产品缺陷预测" +description = "该模型预测Supersoaker生产线故障。在下面的数据帧组件中,您可以拖放数据集的任意切片或自行编辑值。" + +gr.Interface.load("huggingface/scikit-learn/tabular-playground", title=title, description=description).launch() +``` + + + +使用 `skops` 将 `sklearn` 模型推送到 Hugging Face Hub 时,会包含一个包含示例输入和列名的 `config.json` 文件,解决的任务类型是 `tabular-classification` 或 `tabular-regression`。根据任务类型,`gradio` 构建界面并使用列名和示例输入来构建它。您可以[参考 skops 在 Hub 上托管模型的文档](https://skops.readthedocs.io/en/latest/auto_examples/plot_hf_hub.html#sphx-glr-auto-examples-plot-hf-hub-py)来了解如何使用 `skops` 将模型推送到 Hub。 diff --git a/testbed/gradio-app__gradio/guides/cn/06_client-libraries/01_getting-started-with-the-python-client.md b/testbed/gradio-app__gradio/guides/cn/06_client-libraries/01_getting-started-with-the-python-client.md new file mode 100644 index 0000000000000000000000000000000000000000..d17f0b1fa4193c4d98e1d0e25d02fdcae55a23c5 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/06_client-libraries/01_getting-started-with-the-python-client.md @@ -0,0 +1,260 @@ +# 使用 Gradio Python 客户端入门 + +Tags: CLIENT, API, SPACES + +Gradio Python 客户端使得将任何 Gradio 应用程序作为 API 使用变得非常容易。例如,考虑一下从麦克风录制的[Whisper 音频文件](https://huggingface.co/spaces/abidlabs/whisper)的转录。 + +![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/whisper-screenshot.jpg) + +使用 `gradio_client` 库,我们可以轻松地将 Gradio 用作 API,以编程方式转录音频文件。 + +下面是完成此操作的整个代码: + +```python +from gradio_client import Client + +client = Client("abidlabs/whisper") +client.predict("audio_sample.wav") + +>> "这是Whisper语音识别模型的测试。" +``` + +Gradio 客户端适用于任何托管在 Hugging Face Spaces 上的 
Gradio 应用程序,无论是图像生成器、文本摘要生成器、有状态聊天机器人、税金计算器还是其他任何应用程序!Gradio 客户端主要用于托管在[Hugging Face Spaces](https://hf.space)上的应用程序,但你的应用程序可以托管在任何地方,比如你自己的服务器。 + +**先决条件**:要使用 Gradio 客户端,你不需要详细了解 `gradio` 库。但是,了解 Gradio 的输入和输出组件的概念会有所帮助。 + +## 安装 + +如果你已经安装了最新版本的 `gradio`,那么 `gradio_client` 就作为依赖项包含在其中。 + +否则,可以使用 pip(或 pip3)安装轻量级的 `gradio_client` 包,并且已经测试可以在 Python 3.9 或更高版本上运行: + +```bash +$ pip install gradio_client +``` + +## 连接到运行中的 Gradio 应用程序 + +首先创建一个 `Client` 对象,并将其连接到运行在 Hugging Face Spaces 上或其他任何地方的 Gradio 应用程序。 + +## 连接到 Hugging Face 空间 + +```python +from gradio_client import Client + +client = Client("abidlabs/en2fr") # 一个将英文翻译为法文的Space +``` + +你还可以通过在 `hf_token` 参数中传递你的 HF 令牌来连接到私有空间。你可以在这里获取你的 HF 令牌:https://huggingface.co/settings/tokens + +```python +from gradio_client import Client + +client = Client("abidlabs/my-private-space", hf_token="...") +``` + +## 复制空间以供私人使用 + +虽然你可以将任何公共空间用作 API,但如果你发出太多请求,你可能会受到 Hugging Face 的频率限制。要无限制地使用一个空间,只需将其复制以创建一个私有空间,然后可以根据需要进行多个请求! + +`gradio_client` 包括一个类方法:`Client.duplicate()`,使这个过程变得简单(你需要传递你的[Hugging Face 令牌](https://huggingface.co/settings/tokens)或使用 Hugging Face CLI 登录): + +```python +import os +from gradio_client import Client + +HF_TOKEN = os.environ.get("HF_TOKEN") + +client = Client.duplicate("abidlabs/whisper", hf_token=HF_TOKEN) +client.predict("audio_sample.wav") + +>> "This is a test of the whisper speech recognition model." 
> 即:" 这是 Whisper 语音识别模型的测试。"
Rochester is serve in that so don't find simpus, and devoted abode, to at might in a r—" + +```` + +## 异步运行任务(Running jobs asynchronously) + +应注意`.predict()`是一个*阻塞*操作,因为它在返回预测之前等待操作完成。 + +在许多情况下,直到你需要预测结果之前,你最好让作业在后台运行。你可以通过使用`.submit()`方法创建一个`Job`实例,然后稍后调用`.result()`在作业上获取结果。例如: + +```python +from gradio_client import Client + +client = Client(space="abidlabs/en2fr") +job = client.submit("Hello", api_name="/predict") # 这不是阻塞的 + +# 做其他事情 + +job.result() # 这是阻塞的 + +>> Bonjour +``` + +## 添加回调 (Adding callbacks) + +或者,可以添加一个或多个回调来在作业完成后执行操作,像这样: + +```python +from gradio_client import Client + +def print_result(x): + print(" 翻译的结果是:{x}") + +client = Client(space="abidlabs/en2fr") + +job = client.submit("Hello", api_name="/predict", result_callbacks=[print_result]) + +# 做其他事情 + +>> 翻译的结果是:Bonjour + +``` + +## 状态 (Status) + +`Job`对象还允许您通过调用`.status()`方法获取运行作业的状态。这将返回一个`StatusUpdate`对象,具有以下属性:`code`(状态代码,其中之一表示状态的一组定义的字符串。参见`utils.Status`类)、`rank`(此作业在队列中的当前位置)、`queue_size`(总队列大小)、`eta`(此作业将完成的预计时间)、`success`(表示作业是否成功完成的布尔值)和`time`(生成状态的时间)。 + +```py +from gradio_client import Client + +client = Client(src="gradio/calculator") +job = client.submit(5, "add", 4, api_name="/predict") +job.status() + +>> +``` + +_注意_:`Job`类还有一个`.done()`实例方法,返回一个布尔值,指示作业是否已完成。 + +## 取消作业 (Cancelling Jobs) + +`Job`类还有一个`.cancel()`实例方法,取消已排队但尚未开始的作业。例如,如果你运行: + +```py +client = Client("abidlabs/whisper") +job1 = client.submit("audio_sample1.wav") +job2 = client.submit("audio_sample2.wav") +job1.cancel() # 将返回 False,假设作业已开始 +job2.cancel() # 将返回 True,表示作业已取消 +``` + +如果第一个作业已开始处理,则它将不会被取消。如果第二个作业尚未开始,则它将成功取消并从队列中删除。 + +## 生成器端点 (Generator Endpoints) + +某些Gradio API端点不返回单个值,而是返回一系列值。你可以随时从这样的生成器端点获取返回的一系列值,方法是运行`job.outputs()`: + +```py +from gradio_client import Client + +client = Client(src="gradio/count_generator") +job = client.submit(3, api_name="/count") +while not job.done(): + time.sleep(0.1) +job.outputs() + +>> ['0', '1', '2'] +``` + +请注意,在生成器端点上运行`job.result()`只会获得端点返回的*第一个*值。 + 
+`Job`对象还是可迭代的,这意味着您可以使用它按照从端点返回的结果逐个显示生成器函数的结果。以下是使用`Job`作为生成器的等效示例: + +```py +from gradio_client import Client + +client = Client(src="gradio/count_generator") +job = client.submit(3, api_name="/count") + +for o in job: + print(o) + +>> 0 +>> 1 +>> 2 +``` + +你还可以取消具有迭代输出的作业,在这种情况下,作业将在当前迭代完成运行后完成。 + +```py +from gradio_client import Client +import time + +client = Client("abidlabs/test-yield") +job = client.submit("abcdef") +time.sleep(3) +job.cancel() # 作业在运行 2 个迭代后取消 +``` diff --git a/testbed/gradio-app__gradio/guides/cn/06_client-libraries/02_getting-started-with-the-js-client.md b/testbed/gradio-app__gradio/guides/cn/06_client-libraries/02_getting-started-with-the-js-client.md new file mode 100644 index 0000000000000000000000000000000000000000..6c34eb82421042bc85fb1cc95db4205df351e8eb --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/06_client-libraries/02_getting-started-with-the-js-client.md @@ -0,0 +1,267 @@ +# 使用Gradio JavaScript客户端快速入门 + +Tags: CLIENT, API, SPACES + +Gradio JavaScript客户端使得使用任何Gradio应用作为API非常简单。例如,考虑一下这个[从麦克风录音的Hugging Face Space,用于转录音频文件](https://huggingface.co/spaces/abidlabs/whisper)。 + +![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/whisper-screenshot.jpg) + +使用`@gradio/client`库,我们可以轻松地以编程方式使用Gradio作为API来转录音频文件。 + +以下是完成此操作的完整代码: + +```js +import { client } from "@gradio/client"; + +const response = await fetch( + "https://github.com/audio-samples/audio-samples.github.io/raw/master/samples/wav/ted_speakers/SalmanKhan/sample-1.wav" +); +const audio_file = await response.blob(); + +const app = await client("abidlabs/whisper"); +const transcription = await app.predict("/predict", [audio_file]); + +console.log(transcription.data); +// [ "I said the same phrase 30 times." 
] +``` + +Gradio客户端适用于任何托管的Gradio应用,无论是图像生成器、文本摘要生成器、有状态的聊天机器人、税收计算器还是其他任何应用!Gradio客户端通常与托管在[Hugging Face Spaces](https://hf.space)上的应用一起使用,但您的应用可以托管在任何地方,比如您自己的服务器。 + +**先决条件**:要使用Gradio客户端,您不需要深入了解`gradio`库的细节。但是,熟悉Gradio的输入和输出组件的概念会有所帮助。 + +## 安装 + +可以使用您选择的软件包管理器从npm注册表安装轻量级的`@gradio/client`包,并支持18及以上的Node版本: + +```bash +npm i @gradio/client +``` + +## 连接到正在运行的Gradio应用 + +首先,通过实例化`client`对象并将其连接到在Hugging Face Spaces或任何其他位置运行的Gradio应用来建立连接。 + +## 连接到Hugging Face Space + +```js +import { client } from "@gradio/client"; + +const app = client("abidlabs/en2fr"); // 一个从英语翻译为法语的 Space +``` + +您还可以通过在options参数的`hf_token`属性中传入您的HF token来连接到私有Spaces。您可以在此处获取您的HF token:https://huggingface.co/settings/tokens + +```js +import { client } from "@gradio/client"; + +const app = client("abidlabs/my-private-space", { hf_token="hf_..." }) +``` + +## 为私人使用复制一个Space + +虽然您可以将任何公共Space用作API,但是如果您发出的请求过多,Hugging Face可能会对您进行速率限制。为了无限制使用Space,只需复制Space以创建私有Space,然后使用它来进行任意数量的请求! + +`@gradio/client`还导出了另一个函数`duplicate`,以使此过程变得简单(您将需要传入您的[Hugging Face token](https://huggingface.co/settings/tokens))。 + +`duplicate`与`client`几乎相同,唯一的区别在于底层实现: + +```js +import { client } from "@gradio/client"; + +const response = await fetch( + "https://audio-samples.github.io/samples/mp3/blizzard_unconditional/sample-0.mp3" +); +const audio_file = await response.blob(); + +const app = await duplicate("abidlabs/whisper", { hf_token: "hf_..." 
}); +const transcription = app.predict("/predict", [audio_file]); +``` + +如果您之前复制过一个Space,则重新运行`duplicate`不会创建一个新的Space。而是客户端将连接到先前创建的Space。因此,可以安全地多次使用相同的Space重新运行`duplicate`方法。 + +**注意:**如果原始Space使用了GPU,您的私有Space也将使用GPU,并且将根据GPU的价格向您的Hugging Face账户计费。为了最大程度地减少费用,在5分钟不活动后,您的Space将自动进入休眠状态。您还可以使用`duplicate`的options对象的`hardware`和`timeout`属性来设置硬件,例如: + +```js +import { client } from "@gradio/client"; + +const app = await duplicate("abidlabs/whisper", { + hf_token: "hf_...", + timeout: 60, + hardware: "a10g-small" +}); +``` + +## 连接到通用的Gradio应用 + +如果您的应用程序在其他地方运行,只需提供完整的URL,包括"http://"或"https://"。以下是向运行在共享URL上的Gradio应用进行预测的示例: + +```js +import { client } from "@gradio/client"; + +const app = client("https://bec81a83-5b5c-471e.gradio.live"); +``` + +## 检查API端点 + +一旦连接到Gradio应用程序,可以通过调用`client`的`view_api`方法来查看可用的API端点。 + +对于Whisper Space,我们可以这样做: + +```js +import { client } from "@gradio/client"; + +const app = await client("abidlabs/whisper"); + +const app_info = await app.view_info(); + +console.log(app_info); +``` + +然后我们会看到以下内容: + +```json +{ + "named_endpoints": { + "/predict": { + "parameters": [ + { + "label": "text", + "component": "Textbox", + "type": "string" + } + ], + "returns": [ + { + "label": "output", + "component": "Textbox", + "type": "string" + } + ] + } + }, + "unnamed_endpoints": {} +} +``` + +这告诉我们该Space中有1个API端点,并显示了如何使用API端点进行预测:我们应该调用`.predict()`方法(下面将进行更多探索),并提供类型为`string`的参数`input_audio`,它是指向文件的URL。 + +我们还应该提供`api_name='/predict'`参数给`predict()`方法。虽然如果一个Gradio应用只有1个命名的端点,这不是必需的,但它可以允许我们在单个应用中调用不同的端点。如果应用有未命名的API端点,可以通过运行`.view_api(all_endpoints=True)`来显示它们。 + +## 进行预测 + +进行预测的最简单方法就是使用适当的参数调用`.predict()`方法: + +```js +import { client } from "@gradio/client"; + +const app = await client("abidlabs/en2fr"); +const result = await app.predict("/predict", ["Hello"]); +``` + +如果有多个参数,您应该将它们作为一个数组传递给`.predict()`,像这样: + +```js +import { client } from "@gradio/client"; + +const app = await client("gradio/calculator"); +const result = await 
app.predict("/predict", [4, "add", 5]); +``` + +对于某些输入,例如图像,您应该根据所需要的方便程度传入`Buffer`、`Blob`或`File`。在Node.js中,可以使用`Buffer`或`Blob`;在浏览器环境中,可以使用`Blob`或`File`。 + +```js +import { client } from "@gradio/client"; + +const response = await fetch( + "https://audio-samples.github.io/samples/mp3/blizzard_unconditional/sample-0.mp3" +); +const audio_file = await response.blob(); + +const app = await client("abidlabs/whisper"); +const result = await client.predict("/predict", [audio_file]); +``` + +## 使用事件 + +如果您使用的API可以随时间返回结果,或者您希望访问有关作业状态的信息,您可以使用事件接口获取更大的灵活性。这对于迭代的或生成器的端点特别有用,因为它们会生成一系列离散的响应值。 + +```js +import { client } from "@gradio/client"; + +function log_result(payload) { + const { + data: [translation] + } = payload; + + console.log(`翻译结果为:${translation}`); +} + +const app = await client("abidlabs/en2fr"); +const job = app.submit("/predict", ["Hello"]); + +job.on("data", log_result); +``` + +## 状态 + +事件接口还可以通过监听`"status"`事件来获取运行作业的状态。这将返回一个对象,其中包含以下属性:`status`(当前作业的人类可读状态,`"pending" | "generating" | "complete" | "error"`),`code`(作业的详细gradio code),`position`(此作业在队列中的当前位置),`queue_size`(总队列大小),`eta`(作业完成的预计时间),`success`(表示作业是否成功完成的布尔值)和`time`(作业状态生成的时间,是一个`Date`对象)。 + +```js +import { client } from "@gradio/client"; + +function log_status(status) { + console.log(`此作业的当前状态为:${JSON.stringify(status, null, 2)}`); +} + +const app = await client("abidlabs/en2fr"); +const job = app.submit("/predict", ["Hello"]); + +job.on("status", log_status); +``` + +## 取消作业 + +作业实例还具有`.cancel()`方法,用于取消已排队但尚未启动的作业。例如,如果您运行以下命令: + +```js +import { client } from "@gradio/client"; + +const app = await client("abidlabs/en2fr"); +const job_one = app.submit("/predict", ["Hello"]); +const job_two = app.submit("/predict", ["Friends"]); + +job_one.cancel(); +job_two.cancel(); +``` + +如果第一个作业已经开始处理,那么它将不会被取消,但客户端将不再监听更新(丢弃该作业)。如果第二个作业尚未启动,它将被成功取消并从队列中移除。 + +## 生成器端点 + +某些Gradio API端点不返回单个值,而是返回一系列值。您可以使用事件接口实时侦听这些值: + +```js +import { client } from "@gradio/client"; + +const app = await 
client("gradio/count_generator"); +const job = app.submit(0, [9]); + +job.on("data", (data) => console.log(data)); +``` + +这将按生成端点生成的值进行日志记录。 + +您还可以取消具有迭代输出的作业,在这种情况下,作业将立即完成。 + +```js +import { client } from "@gradio/client"; + +const app = await client("gradio/count_generator"); +const job = app.submit(0, [9]); + +job.on("data", (data) => console.log(data)); + +setTimeout(() => { + job.cancel(); +}, 3000); +``` diff --git a/testbed/gradio-app__gradio/guides/cn/06_client-libraries/fastapi-app-with-the-gradio-client.md b/testbed/gradio-app__gradio/guides/cn/06_client-libraries/fastapi-app-with-the-gradio-client.md new file mode 100644 index 0000000000000000000000000000000000000000..58ef087ea77c24a73fc55f07f5b47a258549eb0b --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/06_client-libraries/fastapi-app-with-the-gradio-client.md @@ -0,0 +1,193 @@ +# 使用Gradio Python客户端构建FastAPI应用 + +Tags: CLIENT, API, WEB APP + +在本博客文章中,我们将演示如何使用 `gradio_client` [Python库](getting-started-with-the-python-client/) 来以编程方式创建Gradio应用的请求,通过创建一个示例FastAPI Web应用。我们将构建的 Web 应用名为“Acappellify”,它允许用户上传视频文件作为输入,并返回一个没有伴奏音乐的视频版本。它还会显示生成的视频库。 + +**先决条件** + +在开始之前,请确保您正在运行Python 3.9或更高版本,并已安装以下库: + +- `gradio_client` +- `fastapi` +- `uvicorn` + +您可以使用`pip`安装这些库: + +```bash +$ pip install gradio_client fastapi uvicorn +``` + +您还需要安装ffmpeg。您可以通过在终端中运行以下命令来检查您是否已安装ffmpeg: + +```bash +$ ffmpeg version +``` + +否则,通过按照这些说明安装ffmpeg [链接](https://www.hostinger.com/tutorials/how-to-install-ffmpeg)。 + +## 步骤1:编写视频处理函数 + +让我们从似乎最复杂的部分开始--使用机器学习从视频中去除音乐。 + +幸运的是,我们有一个现有的Space可以简化这个过程:[https://huggingface.co/spaces/abidlabs/music-separation](https://huggingface.co/spaces/abidlabs/music-separation)。该空间接受一个音频文件,并生成两个独立的音频文件:一个带有伴奏音乐,一个带有原始剪辑中的其他所有声音。非常适合我们的客户端使用! 
+ +打开一个新的Python文件,比如`main.py`,并通过从`gradio_client`导入 `Client` 类,并将其连接到该Space: + +```py +from gradio_client import Client + +client = Client("abidlabs/music-separation") + +def acapellify(audio_path): + result = client.predict(audio_path, api_name="/predict") + return result[0] +``` + +所需的代码仅如上所示--请注意,API端点返回一个包含两个音频文件(一个没有音乐,一个只有音乐)的列表,因此我们只返回列表的第一个元素。 + +--- + +**注意**:由于这是一个公共Space,可能会有其他用户同时使用该Space,这可能导致速度较慢。您可以使用自己的[Hugging Face token](https://huggingface.co/settings/tokens)复制此Space,创建一个只有您自己访问权限的私有Space,并绕过排队。要做到这一点,只需用下面的代码替换上面的前两行: + +```py +from gradio_client import Client + +client = Client.duplicate("abidlabs/music-separation", hf_token=YOUR_HF_TOKEN) +``` + +其他的代码保持不变! + +--- + +现在,当然,我们正在处理视频文件,所以我们首先需要从视频文件中提取音频。为此,我们将使用`ffmpeg`库,它在处理音频和视频文件时做了很多艰巨的工作。使用`ffmpeg`的最常见方法是通过命令行,在Python的`subprocess`模块中调用它: + +我们的视频处理工作流包含三个步骤: + +1. 首先,我们从视频文件路径开始,并使用`ffmpeg`提取音频。 +2. 然后,我们通过上面的`acapellify()`函数传入音频文件。 +3. 最后,我们将新音频与原始视频合并,生成最终的Acapellify视频。 + +以下是Python中的完整代码,您可以将其添加到`main.py`文件中: + +```python +import subprocess + +def process_video(video_path): + old_audio = os.path.basename(video_path).split(".")[0] + ".m4a" + subprocess.run(['ffmpeg', '-y', '-i', video_path, '-vn', '-acodec', 'copy', old_audio]) + + new_audio = acapellify(old_audio) + + new_video = f"acap_{video_path}" + subprocess.call(['ffmpeg', '-y', '-i', video_path, '-i', new_audio, '-map', '0:v', '-map', '1:a', '-c:v', 'copy', '-c:a', 'aac', '-strict', 'experimental', f"static/{new_video}"]) + return new_video +``` + +如果您想了解所有命令行参数的详细信息,请阅读[ffmpeg文档](https://ffmpeg.org/ffmpeg.html),因为它们超出了本教程的范围。 + +## 步骤2: 创建一个FastAPI应用(后端路由) + +接下来,我们将创建一个简单的FastAPI应用程序。如果您以前没有使用过FastAPI,请查看[优秀的FastAPI文档](https://fastapi.tiangolo.com/)。否则,下面的基本模板将看起来很熟悉,我们将其添加到`main.py`中: + +```python +import os +from fastapi import FastAPI, File, UploadFile, Request +from fastapi.responses import HTMLResponse, RedirectResponse +from fastapi.staticfiles import StaticFiles +from fastapi.templating import Jinja2Templates + +app = 
FastAPI() +os.makedirs("static", exist_ok=True) +app.mount("/static", StaticFiles(directory="static"), name="static") +templates = Jinja2Templates(directory="templates") + +videos = [] + +@app.get("/", response_class=HTMLResponse) +async def home(request: Request): + return templates.TemplateResponse( + "home.html", {"request": request, "videos": videos}) + +@app.post("/uploadvideo/") +async def upload_video(video: UploadFile = File(...)): + new_video = process_video(video.filename) + videos.append(new_video) + return RedirectResponse(url='/', status_code=303) +``` + +在这个示例中,FastAPI应用程序有两个路由:`/` 和 `/uploadvideo/`。 + +`/` 路由返回一个显示所有上传视频的画廊的HTML模板。 + +`/uploadvideo/` 路由接受一个带有`UploadFile`对象的 `POST` 请求,表示上传的视频文件。视频文件通过`process_video()`方法进行 "acapellify",并将输出视频存储在一个列表中,该列表在内存中存储了所有上传的视频。 + +请注意,这只是一个非常基本的示例,如果这是一个发布应用程序,则需要添加更多逻辑来处理文件存储、用户身份验证和安全性考虑等。 + +## 步骤3:创建一个FastAPI应用(前端模板) + +最后,我们创建Web应用的前端。首先,在与`main.py`相同的目录下创建一个名为`templates`的文件夹。然后,在`templates`文件夹中创建一个名为`home.html`的模板。下面是最终的文件结构: + +```csv +├── main.py +├── templates +│ └── home.html +``` + +将以下内容写入`home.html`文件中: + +```html +<!DOCTYPE html> <html> <head> <title> 视频库 </title> <style> +body { font-family: sans-serif; margin: 0; padding: 0; background-color: +#f5f5f5; } h1 { text-align: center; margin-top: 30px; margin-bottom: 20px; } +.gallery { display: flex; flex-wrap: wrap; justify-content: center; gap: 20px; +padding: 20px; } .video { border: 2px solid #ccc; box-shadow: 0px 0px 10px +rgba(0, 0, 0, 0.2); border-radius: 5px; overflow: hidden; width: 300px; +margin-bottom: 20px; } .video video { width: 100%; height: 200px; } .video p { +text-align: center; margin: 10px 0; } form { margin-top: 20px; text-align: +center; } input[type="file"] { display: none; } .upload-btn { display: +inline-block; background-color: #3498db; color: #fff; padding: 10px 20px; +font-size: 16px; border: none; border-radius: 5px; cursor: pointer; } +.upload-btn:hover { background-color: #2980b9; } .file-name { margin-left: 10px; +} 
</style> </head> <body> <h1> 视频库 </h1> {% if videos %} +<div class="gallery"> {% for video in videos %} <div class="video"> +<video controls> <source src="{{ url_for('static', path=video) }}" +type="video/mp4"> 您的浏览器不支持视频标签。 </video> <p>{{ video +}}</p> </div> {% endfor %} </div> {% else %} <p> +尚未上传任何视频。</p> {% endif %} <form action="/uploadvideo/" +method="post" enctype="multipart/form-data"> <label for="video-upload" +class="upload-btn"> 选择视频文件 </label> <input type="file" name="video" +id="video-upload"> <span class="file-name"></span> <button +type="submit" class="upload-btn"> 上传 </button> </form> <script> // +在表单中显示所选文件名 const fileUpload = +document.getElementById("video-upload"); const fileName = +document.querySelector(".file-name"); fileUpload.addEventListener("change", (e) +=> { fileName.textContent = e.target.files[0].name; }); </script> </body> +</html> +``` + +## 第4步:运行 FastAPI 应用 + +最后,我们准备好运行由 Gradio Python 客户端提供支持的 FastAPI 应用程序。 + +打开终端并导航到包含 `main.py` 文件的目录,然后在终端中运行以下命令: + +```bash +$ uvicorn main:app +``` + +您应该会看到如下输出: + +```csv +Loaded as API: https://abidlabs-music-separation.hf.space ✔ +INFO: Started server process [1360] +INFO: Waiting for application startup. +INFO: Application startup complete. 
+INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) +``` + +就是这样!开始上传视频,您将在响应中得到一些“acapellified”视频(处理时间根据您的视频长度可能需要几秒钟到几分钟)。以下是上传两个视频后 UI 的外观: + +![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/acapellify.png) + +如果您想了解如何在项目中使用 Gradio Python 客户端的更多信息,请[阅读专门的指南](/getting-started-with-the-python-client/)。 diff --git a/testbed/gradio-app__gradio/guides/cn/06_client-libraries/gradio-and-llm-agents.md b/testbed/gradio-app__gradio/guides/cn/06_client-libraries/gradio-and-llm-agents.md new file mode 100644 index 0000000000000000000000000000000000000000..3e15045f6cd0ff7e0927422d6861f56732b70875 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/06_client-libraries/gradio-and-llm-agents.md @@ -0,0 +1,135 @@ +# Gradio & LLM Agents 🤝 + +非常强大的大型语言模型(LLM),如果我们能赋予它们完成专门任务的技能,它们将变得更加强大。 + +[gradio_tools](https://github.com/freddyaboulton/gradio-tools)库可以将任何[Gradio](https://github.com/gradio-app/gradio)应用程序转化为[工具](https://python.langchain.com/en/latest/modules/agents/tools.html),供[代理](https://docs.langchain.com/docs/components/agents/agent)使用以完成任务。例如,一个LLM可以使用Gradio工具转录在网上找到的语音记录,然后为您summarize它。或者它可以使用不同的Gradio工具对您的Google Drive上的文档应用OCR,然后回答相关问题。 + +本指南将展示如何使用`gradio_tools`让您的LLM代理访问全球托管的最先进的Gradio应用程序。尽管`gradio_tools`与不止一个代理框架兼容,但本指南将重点介绍[Langchain代理](https://docs.langchain.com/docs/components/agents/)。 + +## 一些背景信息 + +### 代理是什么? + +[LangChain代理](https://docs.langchain.com/docs/components/agents/agent)是一个大型语言模型(LLM),它根据使用其众多工具之一的输入来生成输出。 + +### Gradio是什么? + +[Gradio](https://github.com/gradio-app/gradio)是用于构建机器学习Web应用程序并与全球共享的事实上的标准框架-完全由Python驱动!🐍 + +## gradio_tools - 一个端到端的示例 + +要开始使用`gradio_tools`,您只需要导入和初始化工具,然后将其传递给langchain代理! 
+ +在下面的示例中,我们导入`StableDiffusionPromptGeneratorTool`以创建一个良好的稳定扩散提示, +`StableDiffusionTool`以使用我们改进的提示创建一张图片,`ImageCaptioningTool`以为生成的图片加上标题,以及 +`TextToVideoTool`以根据提示创建一个视频。 + +然后,我们告诉我们的代理创建一张狗正在滑板的图片,但在使用图像生成器之前请先改进我们的提示。我们还要求 +它为生成的图片添加标题并创建一个视频。代理可以根据需要决定使用哪个工具,而不需要我们明确告知。 + +```python +import os + +if not os.getenv("OPENAI_API_KEY"): + raise ValueError("OPENAI_API_KEY 必须设置 ") + +from langchain.agents import initialize_agent +from langchain.llms import OpenAI +from gradio_tools import (StableDiffusionTool, ImageCaptioningTool, StableDiffusionPromptGeneratorTool, + TextToVideoTool) + +from langchain.memory import ConversationBufferMemory + +llm = OpenAI(temperature=0) +memory = ConversationBufferMemory(memory_key="chat_history") +tools = [StableDiffusionTool().langchain, ImageCaptioningTool().langchain, + StableDiffusionPromptGeneratorTool().langchain, TextToVideoTool().langchain] + +agent = initialize_agent(tools, llm, memory=memory, agent="conversational-react-description", verbose=True) +output = agent.run(input=("Please create a photo of a dog riding a skateboard " + "but improve my prompt prior to using an image generator." + "Please caption the generated image and create a video for it using the improved prompt.")) +``` + +您会注意到我们正在使用一些与`gradio_tools`一起提供的预构建工具。请参阅此[文档](https://github.com/freddyaboulton/gradio-tools#gradio-tools-gradio--llm-agents)以获取完整的`gradio_tools`工具列表。 +如果您想使用当前不在`gradio_tools`中的工具,很容易添加您自己的工具。下一节将介绍如何添加自己的工具。 + +## gradio_tools - 创建自己的工具 + +核心抽象是`GradioTool`,它允许您为LLM定义一个新的工具,只要您实现标准接口: + +```python +class GradioTool(BaseTool): + + def __init__(self, name: str, description: str, src: str) -> None: + + @abstractmethod + def create_job(self, query: str) -> Job: + pass + + @abstractmethod + def postprocess(self, output: Tuple[Any] | Any) -> str: + pass +``` + +需要满足的要求是: + +1. 工具的名称 +2. 工具的描述。这非常关键!代理根据其描述决定使用哪个工具。请确切描述输入和输出应该是什么样的,最好包括示例。 +3. 
Gradio应用程序的url或space id,例如`freddyaboulton/calculator`。基于该值,`gradio_tools`将通过API创建一个[gradio客户端](https://github.com/gradio-app/gradio/blob/main/client/python/README.md)实例来查询上游应用程序。如果您不熟悉gradio客户端库,请确保点击链接了解更多信息。
+4. create_job - 给定一个字符串,该方法应该解析该字符串并从客户端返回一个job。大多数情况下,这只需将字符串传递给客户端的`submit`函数即可。有关创建job的更多信息,请参阅[这里](https://github.com/gradio-app/gradio/blob/main/client/python/README.md#making-a-prediction)
+5. postprocess - 给定作业的结果,将其转换为LLM可以向用户显示的字符串。
+6. _Optional可选_ - 某些库,例如[MiniChain](https://github.com/srush/MiniChain/tree/main),可能需要一些关于工具使用的底层gradio输入和输出类型的信息。默认情况下,这将返回gr.Textbox(),但如果您想提供更准确的信息,请实现工具的`_block_input(self, gr)`和`_block_output(self, gr)`方法。`gr`变量是gradio模块(通过`import gradio as gr`获得的结果)。`GradioTool`父类将自动引入`gr`并将其传递给`_block_input`和`_block_output`方法。
+
+就是这样!
+
+一旦您创建了自己的工具,请在`gradio_tools`存储库上发起拉取请求!我们欢迎所有贡献。
+
+## 示例工具 - 稳定扩散
+
+以下是作为示例的稳定扩散工具代码:
+```python
+from gradio_tools import GradioTool
+import os
+
+class StableDiffusionTool(GradioTool):
+    """Tool for calling stable diffusion from llm"""
+
+    def __init__(
+        self,
+        name="StableDiffusion",
+        description=(
+            "An image generator. Use this to generate images based on "
+            "text input. Input should be a description of what the image should "
+            "look like. The output will be a path to an image file."
+        ),
+        src="gradio-client-demos/stable-diffusion",
+        hf_token=None,
+    ) -> None:
+        super().__init__(name, description, src, hf_token)
+
+    def create_job(self, query: str) -> Job:
+        return self.client.submit(query, "", 9, fn_index=1)
+
+    def postprocess(self, output: str) -> str:
+        return [os.path.join(output, i) for i in os.listdir(output) if not i.endswith("json")][0]
+
+    def _block_input(self, gr) -> "gr.components.Component":
+        return gr.Textbox()
+
+    def _block_output(self, gr) -> "gr.components.Component":
+        return gr.Image()
+
+```
+关于此实现的一些注意事项:
+1. 
所有的 `GradioTool` 实例都有一个名为 `client` 的属性,它指向底层的 [gradio 客户端](https://github.com/gradio-app/gradio/tree/main/client/python#gradio_client-use-a-gradio-app-as-an-api----in-3-lines-of-python),这就是您在 `create_job` 方法中应该使用的内容。
+
+2. `create_job` 方法只是将查询字符串传递给客户端的 `submit` 函数,并硬编码了一些其他参数,即负面提示字符串和引导比例(guidance scale)。我们可以在后续版本中修改我们的工具,以便从输入字符串中接受这些值。
+
+3. `postprocess` 方法只是返回由稳定扩散空间创建的图库中的第一个图像。我们使用 `os` 模块获取图像的完整路径。
+
+## 结论
+
+现在,您已经知道如何通过数千个运行在野外的 gradio 空间来扩展您的 LLM 的能力了!
+同样,我们欢迎对 [gradio_tools](https://github.com/freddyaboulton/gradio-tools) 库的任何贡献。我们很兴奋看到大家构建的工具!
+```
diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/building-a-pictionary-app.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/building-a-pictionary-app.md
new file mode 100644
index 0000000000000000000000000000000000000000..6b5052d39d57fc6348b2a5cc4581a2d42a2809c0
--- /dev/null
+++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/building-a-pictionary-app.md
@@ -0,0 +1,108 @@
+# 构建一个 Pictionary 应用程序
+
+相关空间:https://huggingface.co/spaces/nateraw/quickdraw
+标签:SKETCHPAD,LABELS,LIVE
+
+## 简介
+
+一个算法能够有多好地猜出你在画什么?几年前,Google 发布了 **Quick Draw** 数据集,其中包含人类绘制的各种物体的图画。研究人员使用这个数据集训练模型来猜测 Pictionary 风格的图画。
+
+这样的模型非常适合与 Gradio 的 _sketchpad_ 输入一起使用,因此在本教程中,我们将使用 Gradio 构建一个 Pictionary 网络应用程序。我们将能够完全使用 Python 构建整个网络应用程序,并且将如下所示(尝试画点什么!):
+
+
+
+让我们开始吧!本指南介绍了如何构建一个 pictionary 应用程序(逐步):
+
+1. [设置 Sketch Recognition 模型](#1-set-up-the-sketch-recognition-model)
+2. [定义 `predict` 函数](#2-define-a-predict-function)
+3. [创建 Gradio 界面](#3-create-a-gradio-interface)
+
+### 先决条件
+
+确保您已经[安装](/getting_started)了 `gradio` Python 包。要使用预训练的草图模型,还需要安装 `torch`。
+
+## 1. 
设置 Sketch Recognition 模型 + +首先,您将需要一个草图识别模型。由于许多研究人员已经在 Quick Draw 数据集上训练了自己的模型,在本教程中,我们将使用一个预训练模型。我们的模型是一个由 Nate Raw 训练的轻量级 1.5MB 模型,您可以在此处[下载](https://huggingface.co/spaces/nateraw/quickdraw/blob/main/pytorch_model.bin)。 + +如果您感兴趣,这是用于训练模型的[代码](https://github.com/nateraw/quickdraw-pytorch)。我们将简单地使用 PyTorch 加载预训练的模型,如下所示: + +```python +import torch +from torch import nn + +model = nn.Sequential( + nn.Conv2d(1, 32, 3, padding='same'), + nn.ReLU(), + nn.MaxPool2d(2), + nn.Conv2d(32, 64, 3, padding='same'), + nn.ReLU(), + nn.MaxPool2d(2), + nn.Conv2d(64, 128, 3, padding='same'), + nn.ReLU(), + nn.MaxPool2d(2), + nn.Flatten(), + nn.Linear(1152, 256), + nn.ReLU(), + nn.Linear(256, len(LABELS)), +) +state_dict = torch.load('pytorch_model.bin', map_location='cpu') +model.load_state_dict(state_dict, strict=False) +model.eval() +``` + +## 2. 定义 `predict` 函数 + +接下来,您需要定义一个函数,该函数接受*用户输入*(在本例中是一个涂鸦图像)并返回预测结果。预测结果应该作为一个字典返回,其中键是类名,值是置信度概率。我们将从这个[文本文件](https://huggingface.co/spaces/nateraw/quickdraw/blob/main/class_names.txt)加载类名。 + +对于我们的预训练模型,代码如下所示: + +```python +from pathlib import Path + +LABELS = Path('class_names.txt').read_text().splitlines() + +def predict(img): + x = torch.tensor(img, dtype=torch.float32).unsqueeze(0).unsqueeze(0) / 255. + with torch.no_grad(): + out = model(x) + probabilities = torch.nn.functional.softmax(out[0], dim=0) + values, indices = torch.topk(probabilities, 5) + confidences = {LABELS[i]: v.item() for i, v in zip(indices, values)} + return confidences +``` + +让我们分解一下。该函数接受一个参数: + +- `img`:输入图像,作为一个 `numpy` 数组 + +然后,函数将图像转换为 PyTorch 的 `tensor`,将其通过模型,并返回: + +- `confidences`:前五个预测的字典,其中键是类别标签,值是置信度概率 + +## 3. 
创建一个 Gradio 界面 + +现在我们已经设置好预测函数,我们可以在其周围创建一个 Gradio 界面。 + +在本例中,输入组件是一个 `sketchpad`,使用方便的字符串快捷方式 `"sketchpad"` 创建一个用户可以在其上绘制的画布,并处理将其转换为 numpy 数组的预处理。 + +输出组件将是一个 `"label"`,以良好的形式显示前几个标签。 + +最后,我们将添加一个额外的参数,设置 `live=True`,允许我们的界面实时运行,每当用户在涂鸦板上绘制时,就会调整其预测结果。Gradio 的代码如下所示: + +```python +import gradio as gr + +gr.Interface(fn=predict, + inputs="sketchpad", + outputs="label", + live=True).launch() +``` + +这将产生以下界面,您可以在浏览器中尝试(尝试画一些东西,比如 "snake" 或 "laptop"): + + + +--- + +完成!这就是构建一个 Pictionary 风格的猜词游戏所需的所有代码。玩得开心,并尝试找到一些边缘情况🧐 diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/create-your-own-friends-with-a-gan.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/create-your-own-friends-with-a-gan.md new file mode 100644 index 0000000000000000000000000000000000000000..d610fea7fe57c8f2d40f8eb6ed303a3e259974fc --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/create-your-own-friends-with-a-gan.md @@ -0,0 +1,226 @@ +# 使用 GAN 创建您自己的朋友 + +spaces/NimaBoscarino/cryptopunks, https://huggingface.co/spaces/nateraw/cryptopunks-generator +Tags: GAN, IMAGE, HUB + +由 Nima BoscarinoNate Raw 贡献 + +## 简介 + +最近,加密货币、NFTs 和 Web3 运动似乎都非常流行!数字资产以惊人的金额在市场上上市,几乎每个名人都推出了自己的 NFT 收藏。虽然您的加密资产可能是应税的,例如在加拿大(https://www.canada.ca/en/revenue-agency/programs/about-canada-revenue-agency-cra/compliance/digital-currency/cryptocurrency-guide.html),但今天我们将探索一些有趣且无税的方法来生成自己的一系列过程生成的 CryptoPunks(https://www.larvalabs.com/cryptopunks)。 + +生成对抗网络(GANs),通常称为 GANs,是一类特定的深度学习模型,旨在通过学习输入数据集来创建(生成!)与原始训练集中的元素具有令人信服的相似性的新材料。众所周知,网站[thispersondoesnotexist.com](https://thispersondoesnotexist.com/)通过名为 StyleGAN2 的模型生成了栩栩如生但是合成的人物图像而迅速走红。GANs 在机器学习领域获得了人们的关注,现在被用于生成各种图像、文本甚至音乐! 
+ +今天我们将简要介绍 GAN 的高级直觉,然后我们将围绕一个预训练的 GAN 构建一个小型演示,看看这一切都是怎么回事。下面是我们将要组合的东西的一瞥: + + + +### 先决条件 + +确保已经[安装](/getting_started)了 `gradio` Python 包。要使用预训练模型,请还安装 `torch` 和 `torchvision`。 + +## GANs:简介 + +最初在[Goodfellow 等人 2014 年的论文](https://arxiv.org/abs/1406.2661)中提出,GANs 由互相竞争的神经网络组成,旨在相互智能地欺骗对方。一种网络,称为“生成器”,负责生成图像。另一个网络,称为“鉴别器”,从生成器一次接收一张图像,以及来自训练数据集的 **real 真实**图像。然后,鉴别器必须猜测:哪张图像是假的? + +生成器不断训练以创建对鉴别器更难以识别的图像,而鉴别器每次正确检测到伪造图像时,都会为生成器设置更高的门槛。随着网络之间的这种竞争(**adversarial 对抗性!**),生成的图像改善到了对人眼来说无法区分的地步! + +如果您想更深入地了解 GANs,可以参考[Analytics Vidhya 上的这篇优秀文章](https://www.analyticsvidhya.com/blog/2021/06/a-detailed-explanation-of-gan-with-implementation-using-tensorflow-and-keras/)或这个[PyTorch 教程](https://pytorch.org/tutorials/beginner/dcgan_faces_tutorial.html)。不过,现在我们将深入看一下演示! + +## 步骤 1 - 创建生成器模型 + +要使用 GAN 生成新图像,只需要生成器模型。生成器可以使用许多不同的架构,但是对于这个演示,我们将使用一个预训练的 GAN 生成器模型,其架构如下: + +```python +from torch import nn + +class Generator(nn.Module): + # 有关nc,nz和ngf的解释,请参见下面的链接 + # https://pytorch.org/tutorials/beginner/dcgan_faces_tutorial.html#inputs + def __init__(self, nc=4, nz=100, ngf=64): + super(Generator, self).__init__() + self.network = nn.Sequential( + nn.ConvTranspose2d(nz, ngf * 4, 3, 1, 0, bias=False), + nn.BatchNorm2d(ngf * 4), + nn.ReLU(True), + nn.ConvTranspose2d(ngf * 4, ngf * 2, 3, 2, 1, bias=False), + nn.BatchNorm2d(ngf * 2), + nn.ReLU(True), + nn.ConvTranspose2d(ngf * 2, ngf, 4, 2, 0, bias=False), + nn.BatchNorm2d(ngf), + nn.ReLU(True), + nn.ConvTranspose2d(ngf, nc, 4, 2, 1, bias=False), + nn.Tanh(), + ) + + def forward(self, input): + output = self.network(input) + return output +``` + +我们正在使用来自[此 repo 的 @teddykoker](https://github.com/teddykoker/cryptopunks-gan/blob/main/train.py#L90)的生成器模型,您还可以在那里看到原始的鉴别器模型结构。 + +在实例化模型之后,我们将加载来自 Hugging Face Hub 的权重,存储在[nateraw/cryptopunks-gan](https://huggingface.co/nateraw/cryptopunks-gan)中: + +```python +from huggingface_hub import hf_hub_download +import torch + +model = Generator() +weights_path = 
hf_hub_download('nateraw/cryptopunks-gan', 'generator.pth') +model.load_state_dict(torch.load(weights_path, map_location=torch.device('cpu'))) # 如果有可用的GPU,请使用'cuda' +``` + +## 步骤 2 - 定义“predict”函数 + +`predict` 函数是使 Gradio 工作的关键!我们通过 Gradio 界面选择的任何输入都将通过我们的 `predict` 函数传递,该函数应对输入进行操作并生成我们可以通过 Gradio 输出组件显示的输出。对于 GANs,常见的做法是将随机噪声传入我们的模型作为输入,因此我们将生成一张随机数的张量并将其传递给模型。然后,我们可以使用 `torchvision` 的 `save_image` 函数将模型的输出保存为 `png` 文件,并返回文件名: + +```python +from torchvision.utils import save_image + +def predict(seed): + num_punks = 4 + torch.manual_seed(seed) + z = torch.randn(num_punks, 100, 1, 1) + punks = model(z) + save_image(punks, "punks.png", normalize=True) + return 'punks.png' +``` + +我们给 `predict` 函数一个 `seed` 参数,这样我们就可以使用一个种子固定随机张量生成。然后,我们可以通过传入相同的种子再次查看生成的 punks。 + +_注意!_ 我们的模型需要一个 100x1x1 的输入张量进行单次推理,或者 (BatchSize)x100x1x1 来生成一批图像。在这个演示中,我们每次生成 4 个 punk。 + +## 第三步—创建一个 Gradio 接口 + +此时,您甚至可以运行您拥有的代码 `predict()`,并在您的文件系统中找到新生成的 punk 在 `./punks.png`。然而,为了制作一个真正的交互演示,我们将用 Gradio 构建一个简单的界面。我们的目标是: + +- 设置一个滑块输入,以便用户可以选择“seed”值 +- 使用图像组件作为输出,展示生成的 punk +- 使用我们的 `predict()` 函数来接受种子并生成图像 + +通过使用 `gr.Interface()`,我们可以使用一个函数调用来定义所有这些 : + +```python +import gradio as gr + +gr.Interface( + predict, + inputs=[ + gr.Slider(0, 1000, label='Seed', default=42), + ], + outputs="image", +).launch() +``` + +启动界面后,您应该会看到像这样的东西 : + + + +## 第四步—更多 punk! + +每次生成 4 个 punk 是一个好的开始,但是也许我们想控制每次想生成多少。通过简单地向我们传递给 `gr.Interface` 的 `inputs` 列表添加另一项即可向我们的 Gradio 界面添加更多输入 : + +```python +gr.Interface( + predict, + inputs=[ + gr.Slider(0, 1000, label='Seed', default=42), + gr.Slider(4, 64, label='Number of Punks', step=1, default=10), # 添加另一个滑块! + ], + outputs="image", +).launch() +``` + +新的输入将传递给我们的 `predict()` 函数,所以我们必须对该函数进行一些更改,以接受一个新的参数 : + +```python +def predict(seed, num_punks): + torch.manual_seed(seed) + z = torch.randn(num_punks, 100, 1, 1) + punks = model(z) + save_image(punks, "punks.png", normalize=True) + return 'punks.png' +``` + +当您重新启动界面时,您应该会看到一个第二个滑块,它可以让您控制 punk 的数量! 
+ +## 第五步-完善它 + +您的 Gradio 应用已经准备好运行了,但是您可以添加一些额外的功能来使其真正准备好发光 ✨ + +我们可以添加一些用户可以轻松尝试的示例,通过将其添加到 `gr.Interface` 中实现 : + +```python +gr.Interface( + # ... + # 将所有内容保持不变,然后添加 + examples=[[123, 15], [42, 29], [456, 8], [1337, 35]], +).launch(cache_examples=True) # cache_examples是可选的 +``` + +`examples` 参数接受一个列表的列表,其中子列表中的每个项目的顺序与我们列出的 `inputs` 的顺序相同。所以在我们的例子中,`[seed, num_punks]`。试一试吧! + +您还可以尝试在 `gr.Interface` 中添加 `title`、`description` 和 `article`。每个参数都接受一个字符串,所以试试看发生了什么👀 `article` 也接受 HTML,如[前面的指南](./key_features/#descriptive-content)所述! + +当您完成所有操作后,您可能会得到类似于这样的结果 : + + + +供参考,这是我们的完整代码 : + +```python +import torch +from torch import nn +from huggingface_hub import hf_hub_download +from torchvision.utils import save_image +import gradio as gr + +class Generator(nn.Module): + # 关于nc、nz和ngf的解释,请参见下面的链接 + # https://pytorch.org/tutorials/beginner/dcgan_faces_tutorial.html#inputs + def __init__(self, nc=4, nz=100, ngf=64): + super(Generator, self).__init__() + self.network = nn.Sequential( + nn.ConvTranspose2d(nz, ngf * 4, 3, 1, 0, bias=False), + nn.BatchNorm2d(ngf * 4), + nn.ReLU(True), + nn.ConvTranspose2d(ngf * 4, ngf * 2, 3, 2, 1, bias=False), + nn.BatchNorm2d(ngf * 2), + nn.ReLU(True), + nn.ConvTranspose2d(ngf * 2, ngf, 4, 2, 0, bias=False), + nn.BatchNorm2d(ngf), + nn.ReLU(True), + nn.ConvTranspose2d(ngf, nc, 4, 2, 1, bias=False), + nn.Tanh(), + ) + + def forward(self, input): + output = self.network(input) + return output + +model = Generator() +weights_path = hf_hub_download('nateraw/cryptopunks-gan', 'generator.pth') +model.load_state_dict(torch.load(weights_path, map_location=torch.device('cpu'))) # 如果您有可用的GPU,使用'cuda' + +def predict(seed, num_punks): + torch.manual_seed(seed) + z = torch.randn(num_punks, 100, 1, 1) + punks = model(z) + save_image(punks, "punks.png", normalize=True) + return 'punks.png' + +gr.Interface( + predict, + inputs=[ + gr.Slider(0, 1000, label='Seed', default=42), + gr.Slider(4, 64, label='Number of Punks', step=1, default=10), + ], + 
outputs="image",
+    examples=[[123, 15], [42, 29], [456, 8], [1337, 35]],
+).launch(cache_examples=True)
+```
+
+---
+
+恭喜!你已经成功构建了自己的基于 GAN 的 CryptoPunks 生成器,配备了一个时尚的 Gradio 界面,使任何人都能轻松使用。现在你可以在 Hub 上[寻找更多的 GANs](https://huggingface.co/models?other=gan)(或者自己训练)并继续制作更多令人赞叹的演示项目。🤗
diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/creating-a-chatbot.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/creating-a-chatbot.md
new file mode 100644
index 0000000000000000000000000000000000000000..0c2be1db38b941423511134250ac3fbc5bc81fb0
--- /dev/null
+++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/creating-a-chatbot.md
@@ -0,0 +1,82 @@
+# 如何创建一个聊天机器人
+
+Tags: NLP, TEXT, CHAT
+Related spaces: https://huggingface.co/spaces/gradio/chatbot_streaming, https://huggingface.co/spaces/project-baize/Baize-7B,
+
+## 简介
+
+聊天机器人在自然语言处理 (NLP) 研究和工业界被广泛使用。由于聊天机器人是直接由客户和最终用户使用的,因此验证聊天机器人在面对各种输入提示时的行为是否符合预期至关重要。
+
+通过使用 `gradio`,您可以轻松构建聊天机器人模型的演示,并与用户共享,或使用直观的聊天机器人图形界面自己尝试。
+
+本教程将展示如何使用 Gradio 制作几种不同类型的聊天机器人用户界面:首先是一个简单的文本显示界面,其次是一个用于流式文本响应的界面,最后一个是可以处理媒体文件的聊天机器人。我们创建的聊天机器人界面将如下所示:
+
+$demo_chatbot_streaming
+
+**先决条件**:我们将使用 `gradio.Blocks` 类来构建我们的聊天机器人演示。
+如果您对此还不熟悉,可以[先阅读 Blocks 指南](https://gradio.app/quickstart/#blocks-more-flexibility-and-control)。同时,请确保您使用的是**最新版本**的 Gradio:`pip install --upgrade gradio`。
+
+## 简单聊天机器人演示
+
+让我们从重新创建上面的简单演示开始。正如您可能已经注意到的,我们的机器人只是随机对任何输入回复 " 你好吗?"、" 我爱你 " 或 " 我非常饿 "。这是使用 Gradio 创建此演示的代码:
+
+$code_chatbot_simple
+
+这里有三个 Gradio 组件:
+
+- 一个 `Chatbot`,其值将整个对话的历史记录作为用户和机器人之间的响应对列表存储。
+- 一个文本框,用户可以在其中键入他们的消息,然后按下 Enter/ 提交以触发聊天机器人的响应
+- 一个 `ClearButton` 按钮,用于清除文本框和整个聊天机器人的历史记录
+
+我们有一个名为 `respond()` 的函数,它接收聊天机器人的整个历史记录,附加一个随机消息,等待 1 秒,然后返回更新后的聊天历史记录。`respond()` 函数在返回时还清除了文本框。
+
+当然,实际上,您会用自己更复杂的函数替换 `respond()`,该函数可能调用预训练模型或 API 来生成响应。
+
+$demo_chatbot_simple
+
+## 为聊天机器人添加流式响应
+
+我们可以通过几种方式来改进上述聊天机器人的用户体验。首先,我们可以流式传输响应,以便用户不必等待太长时间才能生成消息。其次,我们可以让用户的消息在聊天历史记录中立即出现,同时聊天机器人的响应正在生成。以下是实现这一点的代码:
+
+$code_chatbot_streaming
+ 
+当用户提交他们的消息时,您会注意到我们现在使用 `.then()` 与三个事件事件 _链_ 起来: + +1. 第一个方法 `user()` 用用户消息更新聊天机器人并清除输入字段。此方法还使输入字段处于非交互状态,以防聊天机器人正在响应时用户发送另一条消息。由于我们希望此操作立即发生,因此我们设置 `queue=False`,以跳过任何可能的队列。聊天机器人的历史记录附加了`(user_message, None)`,其中的 `None` 表示机器人未作出响应。 + +2. 第二个方法 `bot()` 使用机器人的响应更新聊天机器人的历史记录。我们不是创建新消息,而是将先前创建的 `None` 消息替换为机器人的响应。最后,我们逐个字符构造消息并 `yield` 正在构建的中间输出。Gradio 会自动将带有 `yield` 关键字的任何函数 [转换为流式输出接口](/key-features/#iterative-outputs)。 + +3. 第三个方法使输入字段再次可以交互,以便用户可以向机器人发送另一条消息。 + +当然,实际上,您会用自己更复杂的函数替换 `bot()`,该函数可能调用预训练模型或 API 来生成响应。 + +最后,我们通过运行 `demo.queue()` 启用排队,这对于流式中间输出是必需的。您可以通过滚动到本页面顶部的演示来尝试改进后的聊天机器人。 + +## 添加 Markdown、图片、音频或视频 + +`gr.Chatbot` 组件支持包含加粗、斜体和代码等一部分 Markdown 功能。例如,我们可以编写一个函数,以粗体回复用户的消息,类似于 **That's cool!**,如下所示: + +```py +def bot(history): + response = "**That's cool!**" + history[-1][1] = response + return history +``` + +此外,它还可以处理图片、音频和视频等媒体文件。要传递媒体文件,我们必须将文件作为两个字符串的元组传递,如`(filepath, alt_text)` 所示。`alt_text` 是可选的,因此您还可以只传入只有一个元素的元组`(filepath,)`,如下所示: + +```python +def add_file(history, file): + history = history + [((file.name,), None)] + return history +``` + +将所有这些放在一起,我们可以创建一个*多模态*聊天机器人,其中包含一个文本框供用户提交文本,以及一个文件上传按钮供提交图像 / 音频 / 视频文件。余下的代码看起来与之前的代码几乎相同: + +$code_chatbot_multimodal +$demo_chatbot_multimodal + +你完成了!这就是构建聊天机器人模型界面所需的所有代码。最后,我们将结束我们的指南,并提供一些在 Spaces 上运行的聊天机器人的链接,以让你了解其他可能性: + +- [project-baize/Baize-7B](https://huggingface.co/spaces/project-baize/Baize-7B):一个带有停止生成和重新生成响应功能的样式化聊天机器人。 +- [MAGAer13/mPLUG-Owl](https://huggingface.co/spaces/MAGAer13/mPLUG-Owl):一个多模态聊天机器人,允许您对响应进行投票。 diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/creating-a-new-component.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/creating-a-new-component.md new file mode 100644 index 0000000000000000000000000000000000000000..e7f3ed5133835bb693ae8c4115adcceadad40450 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/creating-a-new-component.md @@ -0,0 +1,390 @@ +# 如何创建一个新组件 + +## 简介 + +本指南旨在说明如何添加一个新组件,你可以在 Gradio 
应用程序中使用该组件。该指南将通过代码片段逐步展示如何添加[ColorPicker](https://gradio.app/docs/#colorpicker)组件。 + +## 先决条件 + +确保您已经按照[CONTRIBUTING.md](https://github.com/gradio-app/gradio/blob/main/CONTRIBUTING.md)指南设置了本地开发环境(包括客户端和服务器端)。 + +以下是在 Gradio 上创建新组件的步骤: + +1. [创建一个新的 Python 类并导入它](#1-create-a-new-python-class-and-import-it) +2. [创建一个新的 Svelte 组件](#2-create-a-new-svelte-component) +3. [创建一个新的演示](#3-create-a-new-demo) + +## 1. 创建一个新的 Python 类并导入它 + +首先要做的是在[components.py](https://github.com/gradio-app/gradio/blob/main/gradio/components.py)文件中创建一个新的类。这个 Python 类应该继承自一系列的基本组件,并且应该根据要添加的组件的类型(例如输入、输出或静态组件)将其放置在文件中的正确部分。 +一般来说,建议参考现有的组件(例如[TextBox](https://github.com/gradio-app/gradio/blob/main/gradio/components.py#L290)),将其代码复制为骨架,然后根据实际情况进行修改。 + +让我们来看一下添加到[components.py](https://github.com/gradio-app/gradio/blob/main/gradio/components.py)文件中的 ColorPicker 组件的类: + +```python +@document() +class ColorPicker(Changeable, Submittable, IOComponent): + """ + 创建一个颜色选择器,用户可以选择颜色作为字符串输入。 + 预处理:将选择的颜色值作为{str}传递给函数。 + 后处理:期望从函数中返回一个{str},并将颜色选择器的值设置为它。 + 示例格式:表示颜色的十六进制{str},例如红色的"#ff0000"。 + 演示:color_picker,color_generator + """ + + def __init__( + self, + value: str = None, + *, + label: Optional[str] = None, + show_label: bool = True, + interactive: Optional[bool] = None, + visible: bool = True, + elem_id: Optional[str] = None, + **kwargs, + ): + """ + Parameters: + """ + Parameters: + value: default text to provide in color picker. + label: component name in interface. + show_label: if True, will display label. + interactive: if True, will be rendered as an editable color picker; if False, editing will be disabled. If not provided, this is inferred based on whether the component is used as an input or output. + visible: If False, component will be hidden. + elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles. 
+ """ + self.value = self.postprocess(value) + self.cleared_value = "#000000" + self.test_input = value + IOComponent.__init__( + self, + label=label, + show_label=show_label, + interactive=interactive, + visible=visible, + elem_id=elem_id, + **kwargs, + ) + + def get_config(self): + return { + "value": self.value, + **IOComponent.get_config(self), + } + + @staticmethod + def update( + value: Optional[Any] = None, + label: Optional[str] = None, + show_label: Optional[bool] = None, + visible: Optional[bool] = None, + interactive: Optional[bool] = None, + ): + return { + "value": value, + "label": label, + "show_label": show_label, + "visible": visible, + "interactive": interactive, + "__type__": "update", + } + + # 输入功能 + def preprocess(self, x: str | None) -> Any: + """ + Any preprocessing needed to be performed on function input. + Parameters: + x (str): text + Returns: + (str): text + """ + if x is None: + return None + else: + return str(x) + + def preprocess_example(self, x: str | None) -> Any: + """ + 在传递给主函数之前,对示例进行任何预处理。 + """ + if x is None: + return None + else: + return str(x) + + # 输出功能 + def postprocess(self, y: str | None): + """ + Any postprocessing needed to be performed on function output. + Parameters: + y (str | None): text + Returns: + (str | None): text + """ + if y is None: + return None + else: + return str(y) + + def deserialize(self, x): + """ + 将从调用接口的序列化输出(例如base64表示)转换为输出的人类可读版本(图像的路径等) + """ + return x +``` + +一旦定义完,就需要在[\_\_init\_\_](https://github.com/gradio-app/gradio/blob/main/gradio/__init__.py)模块类中导入新类,以使其可见。 + +```python + +from gradio.components import ( + ... + ColorPicker, + ... 
+) + +``` + +### 1.1 为 Python 类编写单元测试 + +在开发新组件时,还应为其编写一套单元测试。这些测试应该放在[gradio/test/test_components.py](https://github.com/gradio-app/gradio/blob/main/test/test_components.py)文件中。同样,如上所述,参考其他组件的测试(例如[Textbox](https://github.com/gradio-app/gradio/blob/main/test/test_components.py))并添加尽可能多的单元测试,以测试新组件的所有不同方面和功能。例如,为 ColorPicker 组件添加了以下测试: + +```python +class TestColorPicker(unittest.TestCase): + def test_component_functions(self): + """ + Preprocess, postprocess, serialize, save_flagged, restore_flagged, tokenize, get_config + """ + color_picker_input = gr.ColorPicker() + self.assertEqual(color_picker_input.preprocess("#000000"), "#000000") + self.assertEqual(color_picker_input.preprocess_example("#000000"), "#000000") + self.assertEqual(color_picker_input.postprocess(None), None) + self.assertEqual(color_picker_input.postprocess("#FFFFFF"), "#FFFFFF") + self.assertEqual(color_picker_input.serialize("#000000", True), "#000000") + + color_picker_input.interpretation_replacement = "unknown" + + self.assertEqual( + color_picker_input.get_config(), + { + "value": None, + "show_label": True, + "label": None, + "style": {}, + "elem_id": None, + "visible": True, + "interactive": None, + "name": "colorpicker", + }, + ) + + def test_in_interface_as_input(self): + """ + 接口、处理、解释 + """ + iface = gr.Interface(lambda x: x, "colorpicker", "colorpicker") + self.assertEqual(iface.process(["#000000"]), ["#000000"]) + + def test_in_interface_as_output(self): + """ + 接口、处理 + + """ + iface = gr.Interface(lambda x: x, "colorpicker", gr.ColorPicker()) + self.assertEqual(iface.process(["#000000"]), ["#000000"]) + + def test_static(self): + """ + 后处理 + """ + component = gr.ColorPicker("#000000") + self.assertEqual(component.get_config().get("value"), "#000000") +``` + +## 2. 
创建一个新的 Svelte 组件 + +让我们来看看创建新组件的前端并将其与其 Python 代码映射起来的步骤: + +- 在 [js 文件夹](https://github.com/gradio-app/gradio/tree/main/js/) 中创建一个新的 UI-side Svelte 组件,并确定要放置在什么地方。选项包括:创建新组件的包(如果与现有组件完全不同),或将新组件添加到现有包中,例如 [form 包](https://github.com/gradio-app/gradio/tree/main/js/form)。例如,ColorPicker 组件被包含在 form 包中,因为它与已存在的组件相似。 +- 在您将 Svelte 组件放置的包的 src 文件夹中创建一个带有适当名称的文件,注意:名称必须以大写字母开头。这是“核心”组件,是没有 Gradio 特定功能了解的通用组件。最初,将任何文本 /HTML 添加到此文件,以便组件呈现任何内容。ColorPicker 的 Svelte 应用程序代码如下所示: + +```typescript + + + + +``` + +- 通过执行 `export { default as FileName } from "./FileName.svelte"`,在您将 Svelte 组件放置的包的 index.ts 文件中导出此文件。例如,在 [index.ts](https://github.com/gradio-app/gradio/blob/main/js/form/src/index.ts) 文件中导出了 ColorPicker 文件,并通过 `export { default as ColorPicker } from "./ColorPicker.svelte";` 执行导出。 +- 创建 [js/app/src/components](https://github.com/gradio-app/gradio/tree/main/js/app/src/components) 中的 Gradio 特定组件。这是一个 Gradio 包装器,处理库的特定逻辑,将必要的数据传递给核心组件,并附加任何必要的事件监听器。复制另一个组件的文件夹,重新命名并编辑其中的代码,保持结构不变。 + +在这里,您将拥有三个文件,第一个文件用于 Svelte 应用程序,具体如下所示: + +```typescript + + + + + + + + + +``` + +第二个文件包含了前端的测试,例如 ColorPicker 组件的测试: + +```typescript +import { test, describe, assert, afterEach } from "vitest"; +import { cleanup, render } from "@gradio/tootils"; + +import ColorPicker from "./ColorPicker.svelte"; +import type { LoadingStatus } from "../StatusTracker/types"; + +const loading_status = { + eta: 0, + queue_position: 1, + status: "complete" as LoadingStatus["status"], + scroll_to_output: false, + visible: true, + fn_index: 0 +}; + +describe("ColorPicker", () => { + afterEach(() => cleanup()); + + test("renders provided value", () => { + const { getByDisplayValue } = render(ColorPicker, { + loading_status, + show_label: true, + mode: "dynamic", + value: "#000000", + label: "ColorPicker" + }); + + const item: HTMLInputElement = getByDisplayValue("#000000"); + assert.equal(item.value, "#000000"); + }); + + test("changing the color should update the value", async () => { + const { component, 
getByDisplayValue } = render(ColorPicker, { + loading_status, + show_label: true, + mode: "dynamic", + value: "#000000", + label: "ColorPicker" + }); + + const item: HTMLInputElement = getByDisplayValue("#000000"); + + assert.equal(item.value, "#000000"); + + await component.$set({ + value: "#FFFFFF" + }); + + assert.equal(component.value, "#FFFFFF"); + }); +}); +``` + +The third one is the index.ts file: + +```typescript +export { default as Component } from "./ColorPicker.svelte"; +export const modes = ["static", "dynamic"]; +``` + +- `directory.ts` 文件中添加组件的映射。复制并粘贴任何组件的映射行,并编辑其文本。键名必须是 Python 库中实际组件名称的小写版本。例如,对于 ColorPicker 组件,映射如下所示: + +```typescript +export const component_map = { +... +colorpicker: () => import("./ColorPicker"), +... +} +``` + +### 2.1 为 Svelte 组件编写单元测试 + +在开发新组件时,您还应该为其编写一套单元测试。测试应该放置在新组件的文件夹中,文件名为 MyAwesomeComponent.test.ts。同样,像上面那样参考其他组件的测试(例如[Textbox.test.ts](https://github.com/gradio-app/gradio/blob/main/js/app/src/components/Textbox/Textbox.test.ts)),并添加尽可能多的单元测试,以测试新组件的不同方面和功能。 + +### 3. 创建新的演示 + +最后一步是在[gradio/demo 文件夹](https://github.com/gradio-app/gradio/tree/main/demo)中创建一个使用新添加的组件的演示。同样,建议参考现有演示。在一个名为 run.py 的文件中编写演示的代码,添加必要的要求和显示应用程序界面的图像。最后添加一个显示其用法的 gif。 +您可以查看为 ColorPicker 创建的[demo](https://github.com/gradio-app/gradio/tree/main/demo/color_picker),其中以新组件选择的图标和颜色作为输入,并以选择的颜色着色的相同图标作为输出。 + +要测试应用程序: + +- 在终端上运行 `python path/demo/run.py`,它会在地址 [http://localhost:7860](http://localhost:7860) 启动后端; +- 在另一个终端上,运行 `pnpm dev` 以在 [http://localhost:9876](http://localhost:9876) 上启动具有热重新加载功能的前端。 + +## 结论 + +在本指南中,我们展示了将新组件添加到 Gradio 是多么简单,逐步介绍了如何添加 ColorPicker 组件。要了解更多细节,可以参考 PR:[#1695](https://github.com/gradio-app/gradio/pull/1695). 
diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/custom-interpretations-with-blocks.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/custom-interpretations-with-blocks.md new file mode 100644 index 0000000000000000000000000000000000000000..173b5b1b82ac2b4d52c8d2910f4cba1fcdba2b08 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/custom-interpretations-with-blocks.md @@ -0,0 +1,181 @@ +# 使用 Blocks 进行自定义机器学习解释 + +Tags: INTERPRETATION, SENTIMENT ANALYSIS + +**前提条件**: 此指南要求您了解 Blocks 和界面的解释功能。请确保[首先阅读 Blocks 指南](https://gradio.app/quickstart/#blocks-more-flexibility-and-control)以及[高级界面功能指南](/advanced-interface-features#interpreting-your-predictions)的解释部分。 + +## 简介 + +如果您有使用界面类的经验,那么您就知道解释机器学习模型的预测有多么容易,只需要将 `interpretation` 参数设置为 "default" 或 "shap" 即可。 + +您可能想知道是否可以将同样的解释功能添加到使用 Blocks API 构建的应用程序中。不仅可以做到,而且 Blocks 的灵活性还可以以不可能使用界面来显示解释的方式! + +本指南将展示如何: + +1. 在 Blocks 应用程序中重新创建界面的解释功能的行为。 +2. 自定义 Blocks 应用程序中的解释显示方式。 + +让我们开始吧! + +## 设置 Blocks 应用程序 + +让我们使用 Blocks API 构建一款情感分类应用程序。该应用程序将以文本作为输入,并输出此文本表达负面或正面情感的概率。我们会有一个单独的输入 `Textbox` 和一个单独的输出 `Label` 组件。以下是应用程序的代码以及应用程序本身。 + +```python +import gradio as gr +from transformers import pipeline + +sentiment_classifier = pipeline("text-classification", return_all_scores=True) + +def classifier(text): + pred = sentiment_classifier(text) + return {p["label"]: p["score"] for p in pred[0]} + +with gr.Blocks() as demo: + with gr.Row(): + with gr.Column(): + input_text = gr.Textbox(label="Input Text") + with gr.Row(): + classify = gr.Button("Classify Sentiment") + with gr.Column(): + label = gr.Label(label="Predicted Sentiment") + + classify.click(classifier, input_text, label) +demo.launch() +``` + + + +## 向应用程序添加解释 + +我们的目标是向用户呈现输入中的单词如何 contributed 到模型的预测。 +这将帮助用户理解模型的工作方式,并评估其有效性。 +例如,我们应该期望我们的模型能够将“happy”和“love”这些词与积极的情感联系起来;如果模型没有联系起来,那么这意味着我们在训练过程中出现了错误! 
+ +对于输入中的每个单词,我们将计算模型预测的积极情感如何受该单词的影响。 +一旦我们有了这些 `(word, score)` 对,我们就可以使用 Gradio 将其可视化给用户。 + +[shap](https://shap.readthedocs.io/en/stable/index.html) 库将帮助我们计算 `(word, score)` 对,而 Gradio 将负责将输出显示给用户。 + +以下代码计算 `(word, score)` 对: + +```python +def interpretation_function(text): + explainer = shap.Explainer(sentiment_classifier) + shap_values = explainer([text]) + + # Dimensions are (batch size, text size, number of classes) + # Since we care about positive sentiment, use index 1 + scores = list(zip(shap_values.data[0], shap_values.values[0, :, 1])) + # Scores contains (word, score) pairs + + + # Format expected by gr.components.Interpretation + return {"original": text, "interpretation": scores} +``` + +现在,我们所要做的就是添加一个按钮,在单击后运行此函数。 +为了显示解释,我们将使用 `gr.components.Interpretation`。 +这将使输入中的每个单词变成红色或蓝色。 +如果它有助于积极情感,则为红色,如果它有助于负面情感,则为蓝色。 +这就是界面如何显示文本的解释输出。 + +```python +with gr.Blocks() as demo: + with gr.Row(): + with gr.Column(): + input_text = gr.Textbox(label="Input Text") + with gr.Row(): + classify = gr.Button("Classify Sentiment") + interpret = gr.Button("Interpret") + with gr.Column(): + label = gr.Label(label="Predicted Sentiment") + with gr.Column(): + interpretation = gr.components.Interpretation(input_text) + classify.click(classifier, input_text, label) + interpret.click(interpretation_function, input_text, interpretation) + +demo.launch() +``` + + + +## 自定义解释的显示方式 + +`gr.components.Interpretation` 组件以很好的方式显示单个单词如何 contributed 到情感预测,但是如果我们还想显示分数本身,怎么办呢? 
+ +一种方法是生成一个条形图,其中单词在水平轴上,条形高度对应 shap 得分。 + +我们可以通过修改我们的 `interpretation_function` 来执行此操作,以同时返回一个 matplotlib 条形图。我们将在单独的选项卡中使用 'gr.Plot' 组件显示它。 + +这是解释函数的外观: + +```python +def interpretation_function(text): + explainer = shap.Explainer(sentiment_classifier) + shap_values = explainer([text]) + # Dimensions are (batch size, text size, number of classes) + # Since we care about positive sentiment, use index 1 + scores = list(zip(shap_values.data[0], shap_values.values[0, :, 1])) + + scores_desc = sorted(scores, key=lambda t: t[1])[::-1] + + # Filter out empty string added by shap + scores_desc = [t for t in scores_desc if t[0] != ""] + + fig_m = plt.figure() + + # Select top 5 words that contribute to positive sentiment + plt.bar(x=[s[0] for s in scores_desc[:5]], + height=[s[1] for s in scores_desc[:5]]) + plt.title("Top words contributing to positive sentiment") + plt.ylabel("Shap Value") + plt.xlabel("Word") + return {"original": text, "interpretation": scores}, fig_m +``` + +以下是应用程序代码: + +```python +with gr.Blocks() as demo: + with gr.Row(): + with gr.Column(): + input_text = gr.Textbox(label="Input Text") + with gr.Row(): + classify = gr.Button("Classify Sentiment") + interpret = gr.Button("Interpret") + with gr.Column(): + label = gr.Label(label="Predicted Sentiment") + with gr.Column(): + with gr.Tabs(): + with gr.TabItem("Display interpretation with built-in component"): + interpretation = gr.components.Interpretation(input_text) + with gr.TabItem("Display interpretation with plot"): + interpretation_plot = gr.Plot() + + classify.click(classifier, input_text, label) + interpret.click(interpretation_function, input_text, [interpretation, interpretation_plot]) + +demo.launch() +``` + +demo 在这里 ! 
+ + + +## Beyond Sentiment Classification (超越情感分类) + +尽管到目前为止我们已经集中讨论了情感分类,但几乎可以为任何机器学习模型添加解释。 +输出必须是 `gr.Image` 或 `gr.Label`,但输入几乎可以是任何内容 (`gr.Number`, `gr.Slider`, `gr.Radio`, `gr.Image`)。 + +这是一个使用 Blocks 构建的图像分类模型解释演示: + + + +## 结语 + +我们深入地探讨了解释的工作原理以及如何将其添加到您的 Blocks 应用程序中。 + +我们还展示了 Blocks API 如何让您控制解释在应用程序中的可视化方式。 + +添加解释是使您的用户了解和信任您的模型的有用方式。现在,您拥有了将其添加到所有应用程序所需的所有工具! diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/developing-faster-with-reload-mode.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/developing-faster-with-reload-mode.md new file mode 100644 index 0000000000000000000000000000000000000000..d4a6bdde87f57749b7fc30d4f34dc0332ac880f6 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/developing-faster-with-reload-mode.md @@ -0,0 +1,144 @@ +# 通过自动重载实现更快的开发 + +**先决条件**:本指南要求您了解块的知识。请确保[先阅读块指南](https://gradio.app/quickstart/#blocks-more-flexibility-and-control)。 + +本指南介绍了自动重新加载、在 Python IDE 中重新加载以及在 Jupyter Notebooks 中使用 gradio 的方法。 + +## 为什么要使用自动重载? + +当您构建 Gradio 演示时,特别是使用 Blocks 构建时,您可能会发现反复运行代码以测试更改很麻烦。 + +为了更快速、更便捷地编写代码,我们已经简化了在 **Python IDE**(如 VS Code、Sublime Text、PyCharm 等)中开发或从终端运行 Python 代码时“重新加载”Gradio 应用的方式。我们还开发了一个类似的“魔法命令”,使您可以更快速地重新运行单元格,如果您使用 Jupyter Notebooks(或类似的环境,如 Colab)的话。 + +这个简短的指南将涵盖这两种方法,所以无论您如何编写 Python 代码,您都将知道如何更快地构建 Gradio 应用程序。 + +## Python IDE 重载 🔥 + +如果您使用 Python IDE 构建 Gradio Blocks,那么代码文件(假设命名为 `run.py`)可能如下所示: + +```python +import gradio as gr + +with gr.Blocks() as demo: + gr.Markdown("# 来自Gradio的问候!") + inp = gr.Textbox(placeholder="您叫什么名字?") + out = gr.Textbox() + + inp.change(fn=lambda x: f"欢迎,{x}!", + inputs=inp, + outputs=out) + +if __name__ == "__main__": + demo.launch() +``` + +问题在于,每当您想要更改布局、事件或组件时,都必须通过编写 `python run.py` 来关闭和重新运行应用程序。 + +而不是这样做,您可以通过更改 1 个单词来以**重新加载模式**运行代码:将 `python` 更改为 `gradio`: + +在终端中运行 `gradio run.py`。就是这样! + +现在,您将看到类似于这样的内容: + +```bash +Launching in *reload mode* on: http://127.0.0.1:7860 (Press CTRL+C to quit) + +Watching... 
+ +WARNING: The --reload flag should not be used in production on Windows. +``` + +这里最重要的一行是 `正在观察 ...`。这里发生的情况是 Gradio 将观察 `run.py` 文件所在的目录,如果文件发生更改,它将自动为您重新运行文件。因此,您只需专注于编写代码,Gradio 演示将自动刷新 🥳 + +⚠️ 警告:`gradio` 命令不会检测传递给 `launch()` 方法的参数,因为在重新加载模式下从未调用 `launch()` 方法。例如,设置 `launch()` 中的 `auth` 或 `show_error` 不会在应用程序中反映出来。 + +当您使用重新加载模式时,请记住一件重要的事情:Gradio 专门查找名为 `demo` 的 Gradio Blocks/Interface 演示。如果您将演示命名为其他名称,您需要在代码中的第二个参数中传入演示的 FastAPI 应用程序的名称。对于 Gradio 演示,可以使用 `.app` 属性访问 FastAPI 应用程序。因此,如果您的 `run.py` 文件如下所示: + +```python +import gradio as gr + +with gr.Blocks() as my_demo: + gr.Markdown("# 来自Gradio的问候!") + inp = gr.Textbox(placeholder="您叫什么名字?") + out = gr.Textbox() + + inp.change(fn=lambda x: f"欢迎,{x}!", + inputs=inp, + outputs=out) + +if __name__ == "__main__": + my_demo.launch() +``` + +那么您可以这样启动它:`gradio run.py my_demo.app`。 + +🔥 如果您的应用程序接受命令行参数,您也可以传递它们。下面是一个例子: + +```python +import gradio as gr +import argparse + +parser = argparse.ArgumentParser() +parser.add_argument("--name", type=str, default="User") +args, unknown = parser.parse_known_args() + +with gr.Blocks() as demo: + gr.Markdown(f"# 欢迎 {args.name}!") + inp = gr.Textbox() + out = gr.Textbox() + + inp.change(fn=lambda x: x, inputs=inp, outputs=out) + +if __name__ == "__main__": + demo.launch() +``` + +您可以像这样运行它:`gradio run.py --name Gretel` + +作为一个小提示,只要更改了 `run.py` 源代码或 Gradio 源代码,自动重新加载就会发生。这意味着如果您决定[为 Gradio 做贡献](https://github.com/gradio-app/gradio/blob/main/CONTRIBUTING.md),这将非常有用 ✅ + +## Jupyter Notebook 魔法命令🔮 + +如果您使用 Jupyter Notebooks(或 Colab Notebooks 等)进行开发,我们也为您提供了一个解决方案! 
+ +我们开发了一个 **magic command 魔法命令**,可以为您创建和运行一个 Blocks 演示。要使用此功能,在笔记本顶部加载 gradio 扩展: + +`%load_ext gradio` + +然后,在您正在开发 Gradio 演示的单元格中,只需在顶部写入魔法命令**`%%blocks`**,然后像平常一样编写布局和组件: + +```py +%%blocks + +import gradio as gr + +gr.Markdown("# 来自Gradio的问候!") +inp = gr.Textbox(placeholder="您叫什么名字?") +out = gr.Textbox() + +inp.change(fn=lambda x: f"欢迎,{x}!", + inputs=inp, + outputs=out) +``` + +请注意: + +- 您不需要放置样板代码 `with gr.Blocks() as demo:` 和 `demo.launch()` — Gradio 会自动为您完成! + +- 每次重新运行单元格时,Gradio 都将在相同的端口上重新启动您的应用程序,并使用相同的底层网络服务器。这意味着您将比正常重新运行单元格更快地看到变化。 + +下面是在 Jupyter Notebook 中的示例: + +![](https://i.ibb.co/nrszFws/Blocks.gif) + +🪄这在 colab 笔记本中也适用![这是一个 colab 笔记本](https://colab.research.google.com/drive/1jUlX1w7JqckRHVE-nbDyMPyZ7fYD8488?authuser=1#scrollTo=zxHYjbCTTz_5),您可以在其中看到 Blocks 魔法效果。尝试进行一些更改并重新运行带有 Gradio 代码的单元格! + +Notebook Magic 现在是作者构建 Gradio 演示的首选方式。无论您如何编写 Python 代码,我们都希望这两种方法都能为您提供更好的 Gradio 开发体验。 + +--- + +## 下一步 + +既然您已经了解了如何使用 Gradio 快速开发,请开始构建自己的应用程序吧! + +如果你正在寻找灵感,请尝试浏览其他人用 Gradio 构建的演示,[浏览 Hugging Face Spaces](http://hf.space/) 🤗 diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/how-to-use-3D-model-component.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/how-to-use-3D-model-component.md new file mode 100644 index 0000000000000000000000000000000000000000..d2990cc676407dcde6c1e0afdf74c6701309bfdb --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/how-to-use-3D-model-component.md @@ -0,0 +1,72 @@ +# 如何使用 3D 模型组件 + +相关空间:https://huggingface.co/spaces/dawood/Model3D, https://huggingface.co/spaces/radames/PIFu-Clothed-Human-Digitization, https://huggingface.co/spaces/radames/dpt-depth-estimation-3d-obj +标签:VISION, IMAGE + +## 介绍 + +机器学习中的 3D 模型越来越受欢迎,并且是一些最有趣的演示实验。使用 `gradio`,您可以轻松构建您的 3D 图像模型的演示,并与任何人分享。Gradio 3D 模型组件接受 3 种文件类型,包括:_.obj_,_.glb_ 和 _.gltf_。 + +本指南将向您展示如何使用几行代码构建您的 3D 图像模型的演示;像下面这个示例一样。点击、拖拽和缩放来玩转 3D 对象: + + + +### 先决条件 + +确保已经[安装](https://gradio.app/quickstart)了 `gradio` Python 包。 + 
+## 查看代码 + +让我们来看看如何创建上面的最简界面。在这种情况下,预测函数将只返回原始的 3D 模型网格,但您可以更改此函数以在您的机器学习模型上运行推理。我们将在下面看更复杂的示例。 + +```python +import gradio as gr + +def load_mesh(mesh_file_name): + return mesh_file_name + +demo = gr.Interface( + fn=load_mesh, + inputs=gr.Model3D(), + outputs=gr.Model3D(clear_color=[0.0, 0.0, 0.0, 0.0], label="3D Model"), + examples=[ + ["files/Bunny.obj"], + ["files/Duck.glb"], + ["files/Fox.gltf"], + ["files/face.obj"], + ], + cache_examples=True, +) + +demo.launch() +``` + +让我们来解析上面的代码: + +`load_mesh`:这是我们的“预测”函数,为简单起见,该函数将接收 3D 模型网格并返回它。 + +创建界面: + +- `fn`:当用户点击提交时使用的预测函数。在我们的例子中,它是 `load_mesh` 函数。 +- `inputs`:创建一个 model3D 输入组件。输入是一个上传的文件,作为{str}文件路径。 +- `outputs`:创建一个 model3D 输出组件。输出组件也期望一个文件作为{str}文件路径。 + - `clear_color`:这是 3D 模型画布的背景颜色。期望 RGBa 值。 + - `label`:出现在组件左上角的标签。 +- `examples`:3D 模型文件的列表。3D 模型组件可以接受*.obj*,*.glb*和*.gltf*文件类型。 +- `cache_examples`:保存示例的预测输出,以节省推理时间。 + +## 探索更复杂的 Model3D 演示 + +下面是一个使用 DPT 模型预测图像深度,然后使用 3D 点云创建 3D 对象的演示。查看[code.py](https://huggingface.co/spaces/radames/dpt-depth-estimation-3d-obj/blob/main/app.py)文件,了解代码和模型预测函数。 + + +下面是一个使用 PIFu 模型将穿着衣物的人的图像转换为 3D 数字化模型的演示。查看[spaces.py](https://huggingface.co/spaces/radames/PIFu-Clothed-Human-Digitization/blob/main/PIFu/spaces.py)文件,了解代码和模型预测函数。 + + + +--- + +搞定!这就是构建 Model3D 模型界面所需的所有代码。以下是一些您可能会发现有用的参考资料: + +- Gradio 的[“入门指南”](https://gradio.app/getting_started/) +- 第一个[3D 模型演示](https://huggingface.co/spaces/dawood/Model3D)和[完整代码](https://huggingface.co/spaces/dawood/Model3D/tree/main)(在 Hugging Face Spaces 上) diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/named-entity-recognition.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/named-entity-recognition.md new file mode 100644 index 0000000000000000000000000000000000000000..0c1535ff6486861d2b9a59707f0c598a1928120a --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/named-entity-recognition.md @@ -0,0 +1,80 @@ +# 命名实体识别 (Named-Entity Recognition) + 
+相关空间:https://huggingface.co/spaces/rajistics/biobert_ner_demo,https://huggingface.co/spaces/abidlabs/ner,https://huggingface.co/spaces/rajistics/Financial_Analyst_AI +标签:NER,TEXT,HIGHLIGHT + +## 简介 + +命名实体识别(NER)又称为标记分类或文本标记,它的任务是对一个句子进行分类,将每个单词(或 "token")归为不同的类别,比如人名、地名或词性等。 + +例如,给定以下句子: + +> 芝加哥有巴基斯坦餐厅吗? + +命名实体识别算法可以识别出: + +- "Chicago" as a **location** +- "Pakistani" as an **ethnicity** + +等等。 + +使用 `gradio`(特别是 `HighlightedText` 组件),您可以轻松构建一个 NER 模型的 Web 演示并与团队分享。 + +这是您将能够构建的一个演示的示例: + +$demo_ner_pipeline + +本教程将展示如何使用预训练的 NER 模型并使用 Gradio 界面部署该模型。我们将展示两种不同的使用 `HighlightedText` 组件的方法--根据您的 NER 模型,可以选择其中任何一种更容易学习的方式! + +### 环境要求 + +确保您已经[安装](/getting_started)了 `gradio` Python 包。您还需要一个预训练的命名实体识别模型。在本教程中,我们将使用 `transformers` 库中的一个模型。 + +### 方法一:实体字典列表 + +许多命名实体识别模型输出的是一个字典列表。每个字典包含一个*实体*,一个 " 起始 " 索引和一个 " 结束 " 索引。这就是 `transformers` 库中的 NER 模型的操作方式。 + +```py +from transformers import pipeline +ner_pipeline = pipeline("ner") +ner_pipeline("芝加哥有巴基斯坦餐厅吗?") +``` + +输出结果: + +```bash +[{'entity': 'I-LOC', + 'score': 0.9988978, + 'index': 2, + 'word': 'Chicago', + 'start': 5, + 'end': 12}, + {'entity': 'I-MISC', + 'score': 0.9958592, + 'index': 5, + 'word': 'Pakistani', + 'start': 22, + 'end': 31}] +``` + +如果您有这样的模型,将其连接到 Gradio 的 `HighlightedText` 组件非常简单。您只需要将这个**实体列表**与**原始文本**以字典的形式传递给模型,其中键分别为 `"entities"` 和 `"text"`。 + +下面是一个完整的示例: + +$code_ner_pipeline +$demo_ner_pipeline + +### 方法二:元组列表 + +将数据传递给 `HighlightedText` 组件的另一种方法是使用元组列表。每个元组的第一个元素应该是被归类为特定实体的单词或词组。第二个元素应该是实体标签(如果不需要标签,则为 `None`)。`HighlightedText` 组件会自动组合单词和标签来显示实体。 + +在某些情况下,这比第一种方法更简单。下面是一个使用 Spacy 的词性标注器演示此方法的示例: + +$code_text_analysis +$demo_text_analysis + +--- + +到此为止!您已经了解了为您的 NER 模型构建基于 Web 的图形用户界面所需的全部内容。 + +有趣的提示:只需在 `launch()` 中设置 `share=True`,即可立即与其他人分享您的 NER 演示。 diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/real-time-speech-recognition.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/real-time-speech-recognition.md new file mode 100644 index 
0000000000000000000000000000000000000000..24ca567161b982b1dc0b4d79083209bb6e1bbbb3 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/real-time-speech-recognition.md @@ -0,0 +1,227 @@ +# 实时语音识别 + +Related spaces: https://huggingface.co/spaces/abidlabs/streaming-asr-paused, https://huggingface.co/spaces/abidlabs/full-context-asr +Tags: ASR, SPEECH, STREAMING + +## 介绍 + +自动语音识别(ASR)是机器学习中非常重要且蓬勃发展的领域,它将口语转换为文本。ASR 算法几乎在每部智能手机上都有运行,并越来越多地嵌入到专业工作流程中,例如护士和医生的数字助手。由于 ASR 算法是直接面向客户和最终用户设计的,因此在面对各种语音模式(不同的口音、音调和背景音频条件)时,验证它们的行为是否符合预期非常重要。 + +使用 `gradio`,您可以轻松构建一个 ASR 模型的演示,并与测试团队共享,或通过设备上的麦克风进行自行测试。 + +本教程将展示如何使用预训练的语音识别模型并在 Gradio 界面上部署。我们将从一个 **full-context 全文**模型开始,其中用户在进行预测之前要说完整段音频。然后,我们将调整演示以使其变为 **streaming 流式**,这意味着音频模型将在您说话时将语音转换为文本。我们创建的流式演示将如下所示(在下方尝试或[在新标签页中打开](https://huggingface.co/spaces/abidlabs/streaming-asr-paused)): + + +实时 ASR 本质上是*有状态的*,即模型的预测结果取决于用户先前说的单词。因此,在本教程中,我们还将介绍如何在 Gradio 演示中使用 **state**。 + +### 先决条件 + +确保您已经[安装](/getting_started)了 `gradio` Python 包。您还需要一个预训练的语音识别模型。在本教程中,我们将从两个 ASR 库构建演示: + +- Transformers(为此,`pip install transformers` 和 `pip install torch`)\* DeepSpeech(`pip install deepspeech==0.8.2`) + +确保您至少安装了其中之一,以便您可以跟随本教程操作。如果您尚未安装 `ffmpeg`,请在[系统上下载并安装](https://www.ffmpeg.org/download.html),以便从麦克风处理文件。 + +下面是构建实时语音识别(ASR)应用程序的步骤: + +1. [设置 Transformers ASR 模型](#1-set-up-the-transformers-asr-model) +2. [使用 Transformers 创建一个全文 ASR 演示] + (#2-create-a-full-context-asr-demo-with-transformers) +3. [使用 Transformers 创建一个流式 ASR 演示](#3-create-a-streaming-asr-demo-with-transformers) +4. [使用 DeepSpeech 创建一个流式 ASR 演示](#4-create-a-streaming-asr-demo-with-deepspeech) + +## 1. 设置 Transformers ASR 模型 + +首先,您需要拥有一个 ASR 模型,您可以自己训练,或者需要下载一个预训练模型。在本教程中,我们将使用 Hugging Face 模型的预训练 ASR 模型 `Wav2Vec2`。 + +以下是从 Hugging Face 的 `transformers` 加载 `Wav2Vec2` 的代码: + +```python +from transformers import pipeline +p = pipeline("automatic-speech-recognition") +``` + +就是这样!默认情况下,自动语音识别模型管道会加载 Facebook 的 `facebook/wav2vec2-base-960h` 模型。 + +## 2. 
使用 Transformers 创建一个全文 ASR 演示 + +我们将首先创建一个*全文*ASR 演示,其中用户在使用 ASR 模型进行预测之前说完整段音频。使用 Gradio 非常简单,我们只需在上面的 `pipeline` 对象周围创建一个函数。 + +我们将使用 `gradio` 内置的 `Audio` 组件,配置从用户的麦克风接收输入并返回录制音频的文件路径。输出组件将是一个简单的 `Textbox`。 + +```python +import gradio as gr + +def transcribe(audio): + text = p(audio)["text"] + return text + +gr.Interface( + fn=transcribe, + inputs=gr.Audio(source="microphone", type="filepath"), + outputs="text").launch() +``` + +那么这里发生了什么?`transcribe` 函数接受一个参数 `audio`,它是用户录制的音频文件的文件路径。`pipeline` 对象期望一个文件路径,并将其转换为文本,然后返回到前端并在文本框中显示。 + +让我们看看它的效果吧!(录制一段短音频并点击提交,或[在新标签页打开](https://huggingface.co/spaces/abidlabs/full-context-asr)): + + +## 3. 使用 Transformers 创建一个流式 ASR 演示 +太棒了!我们已经构建了一个对短音频剪辑效果良好的 ASR 模型。但是,如果您正在记录较长的音频剪辑,则可能需要一个*流式*界面,即在用户说话时逐句转录音频,而不仅仅在最后一次全部转录。 + +好消息是,我们可以很容易地调整刚刚创建的演示,使其成为流式的,使用相同的 `Wav2Vec2` 模型。 + +最大的变化是我们现在必须引入一个 `state` 参数,它保存到目前为止*转录的音频*。这样,我们只需处理最新的音频块,并将其简单地追加到先前转录的音频中。 + +在向 Gradio 演示添加状态时,您需要完成 3 件事: + +- 在函数中添加 `state` 参数* 在函数末尾返回更新后的 `state`* 在 `Interface` 的 `inputs` 和 `outputs` 中添加 `"state"` 组件 + +以下是代码示例: + +```python +def transcribe(audio, state=""): + text = p(audio)["text"] + state += text + " " + return state, state + +# Set the starting state to an empty string +gr.Interface( + fn=transcribe, + inputs=[ + gr.Audio(source="microphone", type="filepath", streaming=True), + "state" + ], + outputs=[ + "textbox", + "state" + ], + live=True).launch() +``` + +请注意,我们还进行了另一个更改,即我们设置了 `live=True`。这使得 Gradio 接口保持持续运行,因此它可以自动转录音频,而无需用户反复点击提交按钮。 + +让我们看看它的效果(在下方尝试或[在新标签页中打开](https://huggingface.co/spaces/abidlabs/streaming-asr))! 
+ + + +你可能注意到的一件事是,由于音频块非常小,所以转录质量下降了,它们缺乏正确转录所需的上下文。此问题的“hacky”解决方法是简单地增加 `transcribe()` 函数的运行时间,以便处理更长的音频块。我们可以通过在函数中添加 `time.sleep()` 来实现这一点,如下所示(接下来我们将看到一个正确的解决方法) + +```python +from transformers import pipeline +import gradio as gr +import time + +p = pipeline("automatic-speech-recognition") + +def transcribe(audio, state=""): + time.sleep(2) + text = p(audio)["text"] + state += text + " " + return state, state + +gr.Interface( + fn=transcribe, + inputs=[ + gr.Audio(source="microphone", type="filepath", streaming=True), + "state" + ], + outputs=[ + "textbox", + "state" + ], + live=True).launch() +``` + +尝试下面的演示,查看差异(或[在新标签页中打开](https://huggingface.co/spaces/abidlabs/streaming-asr-paused))! + + + +## 4. 使用 DeepSpeech 创建流式 ASR 演示 + +您不仅限于使用 `transformers` 库中的 ASR 模型 - 您可以使用自己的模型或其他库中的模型。`DeepSpeech` 库包含专门用于处理流式音频数据的模型。这些模型在处理流式数据时表现非常好,因为它们能够考虑到先前的音频块在进行预测时产生的影响。 + +深入研究 DeepSpeech 库超出了本指南的范围(可以在[此处查看其优秀的文档](https://deepspeech.readthedocs.io/en/r0.9/)),但是您可以像使用 Transformer ASR 模型一样,使用 DeepSpeech ASR 模型使用类似的方法使用 Gradio。 + +下面是一个完整的示例(在 Linux 上): + +首先通过终端安装 DeepSpeech 库并下载预训练模型: + +```bash +wget https://github.com/mozilla/DeepSpeech/releases/download/v0.8.2/deepspeech-0.8.2-models.pbmm +wget https://github.com/mozilla/DeepSpeech/releases/download/v0.8.2/deepspeech-0.8.2-models.scorer +apt install libasound2-dev portaudio19-dev libportaudio2 libportaudiocpp0 ffmpeg +pip install deepspeech==0.8.2 +``` + +然后,创建与之前相似的 `transcribe()` 函数: + +```python +from deepspeech import Model +import numpy as np + +model_file_path = "deepspeech-0.8.2-models.pbmm" +lm_file_path = "deepspeech-0.8.2-models.scorer" +beam_width = 100 +lm_alpha = 0.93 +lm_beta = 1.18 + +model = Model(model_file_path) +model.enableExternalScorer(lm_file_path) +model.setScorerAlphaBeta(lm_alpha, lm_beta) +model.setBeamWidth(beam_width) + + +def reformat_freq(sr, y): + if sr not in ( + 48000, + 16000, + ): # Deepspeech only supports 16k, (we convert 48k -> 16k) + raise ValueError("Unsupported rate", sr) + 
if sr == 48000: + y = ( + ((y / max(np.max(y), 1)) * 32767) + .reshape((-1, 3)) + .mean(axis=1) + .astype("int16") + ) + sr = 16000 + return sr, y + + +def transcribe(speech, stream): + _, y = reformat_freq(*speech) + if stream is None: + stream = model.createStream() + stream.feedAudioContent(y) + text = stream.intermediateDecode() + return text, stream + +``` + +然后,如前所述创建一个 Gradio 接口(唯一的区别是返回类型应该是 `numpy` 而不是 `filepath` 以与 DeepSpeech 模型兼容) + +```python +import gradio as gr + +gr.Interface( + fn=transcribe, + inputs=[ + gr.Audio(source="microphone", type="numpy"), + "state" + ], + outputs= [ + "text", + "state" + ], + live=True).launch() +``` + +运行所有这些应该允许您使用一个漂亮的 GUI 部署实时 ASR 模型。尝试一下,看它在您那里运行得有多好。 + +--- + +你已经完成了!这就是构建用于 ASR 模型的基于 Web 的 GUI 所需的所有代码。 + +有趣的提示:您只需在 `launch()` 中设置 `share=True`,即可即时与他人共享 ASR 模型。 diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/running-background-tasks.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/running-background-tasks.md new file mode 100644 index 0000000000000000000000000000000000000000..5035be46dbdcb37a91b25bdf3f06a91cccedc39e --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/running-background-tasks.md @@ -0,0 +1,156 @@ +# 运行后台任务 + +Related spaces: https://huggingface.co/spaces/freddyaboulton/gradio-google-forms +Tags: TASKS, SCHEDULED, TABULAR, DATA + +## 简介 + +本指南介绍了如何从 gradio 应用程序中运行后台任务。 +后台任务是在您的应用程序的请求-响应生命周期之外执行的操作,可以是一次性的或定期的。 +后台任务的示例包括定期将数据与外部数据库同步或通过电子邮件发送模型预测报告。 + +## 概述 + +我们将创建一个简单的“Google Forms”风格的应用程序,用于收集 gradio 库的用户反馈。 +我们将使用一个本地 sqlite 数据库来存储数据,但我们将定期将数据库的状态与[HuggingFace Dataset](https://huggingface.co/datasets)同步,以便始终备份我们的用户评论。 +同步将在每 60 秒运行的后台任务中进行。 + +在演示结束时,您将拥有一个完全可工作的应用程序,类似于以下应用程序 : + + + +## 第一步 - 编写数据库逻辑 💾 + +我们的应用程序将存储评论者的姓名,他们对 gradio 给出的评分(1 到 5 的范围),以及他们想要分享的关于该库的任何评论。让我们编写一些代码,创建一个数据库表来存储这些数据。我们还将编写一些函数,以将评论插入该表中并获取最新的 10 条评论。 + +我们将使用 `sqlite3` 库来连接我们的 sqlite 数据库,但 gradio 可以与任何库一起使用。 + +代码如下 : + +```python +DB_FILE = "./reviews.db" 
+db = sqlite3.connect(DB_FILE) + +# Create table if it doesn't already exist +try: + db.execute("SELECT * FROM reviews").fetchall() + db.close() +except sqlite3.OperationalError: + db.execute( + ''' + CREATE TABLE reviews (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + name TEXT, review INTEGER, comments TEXT) + ''') + db.commit() + db.close() + +def get_latest_reviews(db: sqlite3.Connection): + reviews = db.execute("SELECT * FROM reviews ORDER BY id DESC limit 10").fetchall() + total_reviews = db.execute("Select COUNT(id) from reviews").fetchone()[0] + reviews = pd.DataFrame(reviews, columns=["id", "date_created", "name", "review", "comments"]) + return reviews, total_reviews + + +def add_review(name: str, review: int, comments: str): + db = sqlite3.connect(DB_FILE) + cursor = db.cursor() + cursor.execute("INSERT INTO reviews(name, review, comments) VALUES(?,?,?)", [name, review, comments]) + db.commit() + reviews, total_reviews = get_latest_reviews(db) + db.close() + return reviews, total_reviews +``` + +让我们还写一个函数,在 gradio 应用程序加载时加载最新的评论 : + +```python +def load_data(): + db = sqlite3.connect(DB_FILE) + reviews, total_reviews = get_latest_reviews(db) + db.close() + return reviews, total_reviews +``` + +## 第二步 - 创建 gradio 应用 ⚡ + +现在我们已经定义了数据库逻辑,我们可以使用 gradio 创建一个动态的网页来询问用户的反馈意见! 
+ +使用以下代码段 : + +```python +with gr.Blocks() as demo: + with gr.Row(): + with gr.Column(): + name = gr.Textbox(label="Name", placeholder="What is your name?") + review = gr.Radio(label="How satisfied are you with using gradio?", choices=[1, 2, 3, 4, 5]) + comments = gr.Textbox(label="Comments", lines=10, placeholder="Do you have any feedback on gradio?") + submit = gr.Button(value="Submit Feedback") + with gr.Column(): + data = gr.Dataframe(label="Most recently created 10 rows") + count = gr.Number(label="Total number of reviews") + submit.click(add_review, [name, review, comments], [data, count]) + demo.load(load_data, None, [data, count]) +``` + +## 第三步 - 与 HuggingFace 数据集同步 🤗 + +在第 2 步后我们可以调用 `demo.launch()` 来运行一个完整功能的应用程序。然而,我们的数据将存储在本地机器上。如果 sqlite 文件意外删除,我们将丢失所有评论!让我们将我们的数据备份到 HuggingFace hub 的数据集中。 + +在继续之前,请在[此处](https://huggingface.co/datasets)创建一个数据集。 + +现在,在我们脚本的**顶部**,我们将使用[huggingface hub 客户端库](https://huggingface.co/docs/huggingface_hub/index)连接到我们的数据集并获取最新的备份。 + +```python +TOKEN = os.environ.get('HUB_TOKEN') +repo = huggingface_hub.Repository( + local_dir="data", + repo_type="dataset", + clone_from="", + use_auth_token=TOKEN +) +repo.git_pull() + +shutil.copyfile("./data/reviews.db", DB_FILE) +``` + +请注意,您需要从 HuggingFace 的“设置”选项卡中获取访问令牌,以上代码才能正常工作。在脚本中,通过环境变量安全访问令牌。 + +![access_token](/assets/guides/access_token.png) + +现在,我们将创建一个后台任务,每 60 秒将我们的本地数据库与数据集中的数据同步一次。 +我们将使用[AdvancedPythonScheduler](https://apscheduler.readthedocs.io/en/3.x/)来处理调度。 +然而,这并不是唯一可用的任务调度库。请随意使用您熟悉的任何库。 + +备份数据的函数如下 : + +```python +from apscheduler.schedulers.background import BackgroundScheduler + +def backup_db(): + shutil.copyfile(DB_FILE, "./data/reviews.db") + db = sqlite3.connect(DB_FILE) + reviews = db.execute("SELECT * FROM reviews").fetchall() + pd.DataFrame(reviews).to_csv("./data/reviews.csv", index=False) + print("updating db") + repo.push_to_hub(blocking=False, commit_message=f"Updating data at {datetime.datetime.now()}") + + +scheduler = BackgroundScheduler() 
+scheduler.add_job(func=backup_db, trigger="interval", seconds=60) +scheduler.start() +``` + +## 第四步(附加)- 部署到 HuggingFace Spaces + +您可以使用 HuggingFace [Spaces](https://huggingface.co/spaces) 平台免费部署这个应用程序 ✨ + +如果您之前没有使用过 Spaces,请查看[此处](/using_hugging_face_integrations)的先前指南。 +您将需要将 `HUB_TOKEN` 环境变量作为指南中的一个秘密使用。 + +## 结论 + +恭喜!您知道如何在您的 gradio 应用程序中按计划运行后台任务⏲️。 + +在 Spaces 上运行的应用程序可在[此处](https://huggingface.co/spaces/freddyaboulton/gradio-google-forms)查看。 +完整的代码在[此处](https://huggingface.co/spaces/freddyaboulton/gradio-google-forms/blob/main/app.py)。 diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/running-gradio-on-your-web-server-with-nginx.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/running-gradio-on-your-web-server-with-nginx.md new file mode 100644 index 0000000000000000000000000000000000000000..d8c509a8294b11f3a485aae19e31fb94e2ee7cf6 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/running-gradio-on-your-web-server-with-nginx.md @@ -0,0 +1,78 @@ +# 在 Web 服务器上使用 Nginx 运行 Gradio 应用 + +标签:部署,Web 服务器,Nginx + +## 介绍 + +Gradio 是一个 Python 库,允许您快速创建可定制的 Web 应用程序,用于机器学习模型和数据处理流水线。Gradio 应用可以免费部署在[Hugging Face Spaces](https://hf.space)上。 + +然而,在某些情况下,您可能希望在自己的 Web 服务器上部署 Gradio 应用。您可能已经在使用[Nginx](https://www.nginx.com/)作为高性能的 Web 服务器来提供您的网站(例如 `https://www.example.com`),并且您希望将 Gradio 附加到网站的特定子路径上(例如 `https://www.example.com/gradio-demo`)。 + +在本指南中,我们将指导您在自己的 Web 服务器上的 Nginx 后面运行 Gradio 应用的过程,以实现此目的。 + +**先决条件** + +1. 安装了 [Nginx 的 Linux Web 服务器](https://www.nginx.com/blog/setting-up-nginx/) 和 [Gradio](/quickstart) 库 + +2. 在 Web 服务器上将 Gradio 应用保存为 Python 文件 + +## 编辑 Nginx 配置文件 + +1. 首先编辑 Web 服务器上的 Nginx 配置文件。默认情况下,文件位于:`/etc/nginx/nginx.conf` + +在 `http` 块中,添加以下行以从单独的文件包含服务器块配置: + +```bash +include /etc/nginx/sites-enabled/*; +``` + +2. 在 `/etc/nginx/sites-available` 目录中创建一个新文件(如果目录不存在则创建),文件名表示您的应用,例如:`sudo nano /etc/nginx/sites-available/my_gradio_app` + +3. 
将以下内容粘贴到文件编辑器中: + +```bash +server { + listen 80; + server_name example.com www.example.com; # 将此项更改为您的域名 + + location /gradio-demo/ { # 如果要在不同路径上提供Gradio应用,请更改此项 + proxy_pass http://127.0.0.1:7860/; # 如果您的Gradio应用将在不同端口上运行,请更改此项 + proxy_redirect off; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + } +} +``` + +## 在 Web 服务器上运行 Gradio 应用 + +1. 在启动 Gradio 应用之前,您需要将 `root_path` 设置为与 Nginx 配置中指定的子路径相同。这对于 Gradio 在除域的根路径之外的任何子路径上运行是必要的。 + +以下是一个具有自定义 `root_path` 的简单示例 Gradio 应用: + +```python +import gradio as gr +import time + +def test(x): + time.sleep(4) + return x + +gr.Interface(test, "textbox", "textbox").queue().launch(root_path="/gradio-demo") +``` + +2. 通过键入 `tmux` 并按回车键(可选)启动 `tmux` 会话 + +推荐在 `tmux` 会话中运行 Gradio 应用,以便可以轻松地在后台运行它 + +3. 然后,启动您的 Gradio 应用。只需输入 `python`,后跟您的 Gradio Python 文件的名称。默认情况下,应用将在 `localhost:7860` 上运行,但如果它在其他端口上启动,您需要更新上面的 Nginx 配置文件。 + +## 重新启动 Nginx + +1. 如果您在 tmux 会话中,请通过键入 CTRL + B(或 CMD + B),然后按下 "D" 键来退出。 + +2. 
最后,通过运行 `sudo systemctl restart nginx` 重新启动 nginx。 + +就是这样!如果您在浏览器中访问 `https://example.com/gradio-demo`,您应该能够看到您的 Gradio 应用在那里运行。 diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/setting-up-a-demo-for-maximum-performance.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/setting-up-a-demo-for-maximum-performance.md new file mode 100644 index 0000000000000000000000000000000000000000..91dcbb946ddd40f539fd1d3fd77e74f0db07d06e --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/setting-up-a-demo-for-maximum-performance.md @@ -0,0 +1,110 @@ +# 最佳性能的演示 (Maximum Performance) + +Tags: QUEUE, PERFORMANCE + +假设您的 Gradio 演示在社交媒体上迅速走红-有很多用户同时尝试,您希望为用户提供最佳体验,换句话说就是尽量减少每个用户等待队列中查看他们的预测结果的时间。 + +如何配置您的 Gradio 演示以处理最大流量?在本指南中,我们将深入介绍 Gradio 的 `.queue()` 方法以及其他相关配置的一些参数,并讨论如何设置这些参数,以便您可以同时为大量用户提供服务,并使延迟保持最小。 + +这是一份高级指南,请确保您已经了解 Gradio 的基础知识,例如[如何创建和启动 Gradio 界面](https://gradio.app/quickstart/)。本指南中的大部分信息对于您是将演示托管在[Hugging Face Spaces](https://hf.space)还是在自己的服务器上都是相关的。 + +## 启用 Gradio 的队列系统 + +默认情况下,Gradio 演示不使用队列,而是通过 POST 请求将预测请求发送到托管 Gradio 服务器和 Python 代码的服务器。然而,常规 POST 请求有两个重要的限制: + +(1) 它们会超时-大多数浏览器在 POST 请求在很短的时间(例如 1 分钟)内没有响应时会引发超时错误。 +如果推理功能运行时间超过 1 分钟,或者当同时有很多人尝试您的演示时,增加了延迟。 + +(2) 它们不允许 Gradio 演示和 Gradio 服务器之间的双向通信。这意味着,例如,您无法实时获得您的预测完成所需的预计时间。 + +为了解决这些限制,任何 Gradio 应用都可以通过在 Interface 或 Blocks 启动之前添加 `.queue()` 来转换为使用 **websockets**。以下是一个示例: + +```py +app = gr.Interface(lambda x:x, "image", "image") +app.queue() # <-- Sets up a queue with default parameters +app.launch() +``` + +在上面的演示 `app` 中,预测现在将通过 websocket 发送。 +与 POST 请求不同,websocket 不会超时并且允许双向通信。在 Gradio 服务器上,设置了一个 **queue 队列**,它将每个到达的请求添加到列表中。当一个工作线程可用时,第一个可用的请求将传递给工作线程用于预测。预测完成后,队列通过 websocket 将预测结果发送回调用该预测的特定 Gradio 用户。 + +注意:如果您将 Gradio 应用程序托管在[Hugging Face Spaces](https://hf.space),队列已经 **enabled by default 默认启用**。您仍然可以手动调用 `.queue()` 方法以配置下面描述的队列参数。 + +## 队列参数 (Queuing Parameters) + +有几个参数可用于配置队列,并帮助减少延迟。让我们逐个介绍。 + +### `concurrency_count` 参数 + +我们将首先探讨 
`queue()` 的 `concurrency_count` 参数。该参数用于设置在 Gradio 服务器中将并行处理请求的工作线程数。默认情况下,此参数设置为 `1`,但增加此参数可以**线性增加服务器处理请求的能力**。
+
+那为什么不将此参数设置得更高呢?请记住,由于请求是并行处理的,每个请求将消耗内存用于存储处理的数据和权重。这意味着,如果您将 `concurrency_count` 设置得过高,可能会导致内存溢出错误。如果 `concurrency_count` 过高,也可能出现不断切换不同工作线程的成本导致收益递减的情况。
+
+**推荐**:将 `concurrency_count` 参数增加到能够获得性能提升或达到机器内存限制为止。您可以[在此处了解有关 Hugging Face Spaces 机器规格的信息](https://huggingface.co/docs/hub/spaces-overview)。
+
+_注_:还有第二个参数可控制 Gradio 能够生成的*总*线程数,无论是否启用队列。这是 `launch()` 方法中的 `max_threads` 参数。当您增加 `queue()` 中的 `concurrency_count` 参数时,此参数也会自动增加。然而,在某些情况下,您可能希望手动增加此参数,例如,如果未启用队列。
+
+### `max_size` 参数
+
+减少等待时间的更直接的方法是防止过多的人加入队列。您可以使用 `queue()` 的 `max_size` 参数设置队列处理的最大请求数。如果请求在队列已经达到最大大小时到达,它将被拒绝加入队列,并且用户将收到一个错误提示,指示队列已满,请重试。默认情况下,`max_size=None`,表示没有限制可以加入队列的用户数量。
+
+矛盾地,设置 `max_size` 通常可以改善用户体验,因为它可以防止用户因等待时间过长而被打消兴趣。对您的演示更感兴趣和投入的用户将继续尝试加入队列,并且能够更快地获得他们的结果。
+
+**推荐**:为了获得更好的用户体验,请设置一个合理的 `max_size`,该值基于用户对预测所愿意等待多长时间的预期。
+
+### `max_batch_size` 参数
+
+增加 Gradio 演示的并行性的另一种方法是编写能够接受**批次**输入的函数。大多数深度学习模型可以比处理单个样本更高效地处理批次样本。
+
+如果您编写的函数可以处理一批样本,Gradio 将自动将传入的请求批量处理并作为批量样本传递给您的函数。您需要将 `batch` 设置为 `True`(默认为 `False`),并根据函数能够处理的最大样本数设置 `max_batch_size`(默认为 `4`)。这两个参数可以传递给 `gr.Interface()` 或 Blocks 中的事件,例如 `.click()`。
+
+虽然设置批次在概念上与使工作线程并行处理请求类似,但对于深度学习模型而言,它通常比设置 `concurrency_count` 更快。缺点是您可能需要稍微调整函数以接受批次样本而不是单个样本。
+
+以下是一个不接受批次输入的函数的示例-它一次处理一个输入:
+
+```py
+import time
+
+def trim_words(word, length):
+    return word[:int(length)]
+
+```
+
+这是相同函数的重写版本,接受一批样本:
+
+```py
+import time
+
+def trim_words(words, lengths):
+    trimmed_words = []
+    for w, l in zip(words, lengths):
+        trimmed_words.append(w[:int(l)])
+    return [trimmed_words]
+
+```
+
+# Setup 安装和设置
+
+**建议**:如果可能的话,请编写接受样本批次的函数,然后将 `batch` 设置为 `True`,并根据计算机的内存限制将 `max_batch_size` 设置得尽可能高。如果将 `max_batch_size` 设置为尽可能高,很可能需要将 `concurrency_count` 重新设置为 `1`,因为您将没有足够的内存来同时运行多个工作线程。
+
+### `api_open` 参数
+
+在创建 Gradio 演示时,您可能希望将所有流量限制为通过用户界面而不是通过自动为您的 Gradio 演示创建的[编程 API](/sharing_your_app/#api-page)进行。这一点很重要,因为当人们通过编程 API 
进行请求时,可能会绕过正在等待队列中的用户并降低这些用户的体验。 + +**建议**:在演示中将 `queue()` 中的 `api_open` 参数设置为 `False`,以防止程序化请求。 + +### 升级硬件(GPU,TPU 等) + +如果您已经完成了以上所有步骤,但您的演示仍然不够快,您可以升级模型运行的硬件。将模型从 CPU 上运行切换到 GPU 上运行,深度学习模型的推理时间通常会提高 10 倍到 50 倍。 + +在 Hugging Face Spaces 上升级硬件非常简单。只需单击自己的 Space 中的 "Settings" 选项卡,然后选择所需的 Space 硬件。 + +![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/hub/spaces-gpu-settings.png) + +虽然您可能需要调整部分机器学习推理代码以在 GPU 上运行(如果您使用 PyTorch,[这里有一个方便的指南](https://cnvrg.io/pytorch-cuda/)),但 Gradio 对于硬件选择是完全无感知的,无论您是使用 CPU、GPU、TPU 还是其他任何硬件,都可以正常工作! + +注意:您的 GPU 内存与 CPU 内存不同,因此如果您升级了硬件,您可能需要调整上面描述的`concurrency_count` 参数的值。 + +## 结论 + +祝贺您!您已经了解如何设置 Gradio 演示以获得最佳性能。祝您在下一个病毒式演示中好运! diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/theming-guide.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/theming-guide.md new file mode 100644 index 0000000000000000000000000000000000000000..89515a61bb02341a8ea7903bd54a023a2b9bf89d --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/theming-guide.md @@ -0,0 +1,408 @@ +# 主题 Theming + +Tags: THEMES + +## 介绍 + +Gradio 具有内置的主题引擎,可让您自定义应用的外观和感觉。您可以选择各种主题,或者创建自己的主题。要这样做,请将 `theme=` kwarg 传递给 `Blocks` 或 `Interface` 构造函数。例如: + +```python +with gr.Blocks(theme=gr.themes.Soft()) as demo: + ... +``` + +
+ +
+ +Gradio 带有一组预构建的主题,您可以从 `gr.themes.*` 中加载这些主题。这些主题包括: + +- `gr.themes.Base()` +- `gr.themes.Default()` +- `gr.themes.Glass()` +- `gr.themes.Monochrome()` +- `gr.themes.Soft()` + +这些主题为数百个 CSS 变量设置了值。您可以使用预构建的主题作为自定义主题的起点,也可以从头开始创建自己的主题。让我们看看每种方法。 + +## 使用主题构建器 + +使用主题构建器构建主题最简单。要在本地启动主题构建器,请运行以下代码: + +```python +import gradio as gr + +gr.themes.builder() +``` + +$demo_theme_builder + +您可以使用上面的 Spaces 上运行的 Theme Builder,但通过 `gr.themes.builder()` 在本地启动时运行速度更快。 + +在 Theme Builder 中编辑值时,应用程序将实时预览更新。您可以下载生成的主题代码,以便在任何 Gradio 应用程序中使用它。 + +在本指南的其余部分,我们将介绍如何以编程方式构建主题。 + +## 通过构造函数扩展主题 + +尽管每个主题都有数百个 CSS 变量,但大多数这些变量的值都是从 8 个核心变量中获取的,可以通过每个预构建主题的构造函数设置这些变量。通过修改这 8 个参数的值,您可以快速更改应用程序的外观和感觉。 + +### 核心颜色 + +前 3 个构造函数参数设置主题的颜色,并且是 `gradio.themes.Color` 对象。在内部,这些 Color 对象包含单个色调的调色板的亮度值,范围从 50,100,200...,800,900,950。其他 CSS 变量是从这 3 种颜色派生的。 + +3 个颜色构造函数参数是: + +- `primary_hue`:这是主题中的主色。在默认主题中,此值设置为 `gradio.themes.colors.orange`。 +- `secondary_hue`:这是主题中用于辅助元素的颜色。在默认主题中,此值设置为 `gradio.themes.colors.blue`。 +- `neutral_hue`:这是主题中用于文本和其他中性元素的颜色。在默认主题中,此值设置为 `gradio.themes.colors.gray`。 + +您可以使用字符串快捷方式修改这些值,例如 + +```python +with gr.Blocks(theme=gr.themes.Default(primary_hue="red", secondary_hue="pink")) as demo: + ... +``` + +或者直接使用 `Color` 对象,如下所示: + +```python +with gr.Blocks(theme=gr.themes.Default(primary_hue=gr.themes.colors.red, secondary_hue=gr.themes.colors.pink)) as demo: + ... +``` + +
+ +
+ +预定义的颜色包括: + +- `slate` +- `gray` +- `zinc` +- `neutral` +- `stone` +- `red` +- `orange` +- `amber` +- `yellow` +- `lime` +- `green` +- `emerald` +- `teal` +- `cyan` +- `sky` +- `blue` +- `indigo` +- `violet` +- `purple` +- `fuchsia` +- `pink` +- `rose` + +您还可以创建自己的自定义 `Color` 对象并传递它们。 + +### 核心大小 (Core Sizing) + +接下来的 3 个构造函数参数设置主题的大小,并且是 `gradio.themes.Size` 对象。在内部,这些 Size 对象包含从 `xxs` 到 `xxl` 的像素大小值。其他 CSS 变量是从这 3 个大小派生的。 + +- `spacing_size`:此设置了元素内部的填充和元素之间的间距。在默认主题中,此值设置为 `gradio.themes.sizes.spacing_md`。 +- `radius_size`:此设置了元素的圆角弧度。在默认主题中,此值设置为 `gradio.themes.sizes.radius_md`。 +- `text_size`:此设置了文本的字体大小。在默认主题中,此值设置为 `gradio.themes.sizes.text_md`。 + +您可以使用字符串快捷方式修改这些值,例如 + +```python +with gr.Blocks(theme=gr.themes.Default(spacing_size="sm", radius_size="none")) as demo: + ... +``` + +或者直接使用 `Size` 对象,如下所示: + +```python +with gr.Blocks(theme=gr.themes.Default(spacing_size=gr.themes.sizes.spacing_sm, radius_size=gr.themes.sizes.radius_none)) as demo: + ... +``` + +
+ +
+ +预定义的大小对象包括: + +- `radius_none` +- `radius_sm` +- `radius_md` +- `radius_lg` +- `spacing_sm` +- `spacing_md` +- `spacing_lg` +- `text_sm` +- `text_md` +- `text_lg` + +您还可以创建自己的自定义 `Size` 对象并传递它们。 + +### 核心字体(Core Fonts) + +最后的 2 个构造函数参数设置主题的字体。您可以将一系列字体传递给这些参数,以指定回退字体。如果提供了字符串,它将被加载为系统字体。如果提供了 `gradio.themes.GoogleFont`,则将从 Google Fonts 加载该字体。 + +- `font`:此设置主题的主要字体。在默认主题中,此值设置为 `gradio.themes.GoogleFont("Source Sans Pro")`。 +- `font_mono`:此设置主题的等宽字体。在默认主题中,此值设置为 `gradio.themes.GoogleFont("IBM Plex Mono")`。 + +您可以修改这些值,例如以下方式: + +```python +with gr.Blocks(theme=gr.themes.Default(font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"])) as demo: + ... +``` + +
+ +
+ +## 通过 `.set()` 扩展主题 + +主题加载后,您还可以修改 CSS 变量的值。为此,请使用主题对象的 `.set()` 方法来访问 CSS 变量。例如: + +```python +theme = gr.themes.Default(primary_hue="blue").set( loader_color="#FF0000", slider_color="#FF0000",) +使用`gr.Blocks(theme=theme)`创建演示块 ... +``` + +在上面的示例中,我们将 `loader_color` 和 `slider_color` 变量设置为`#FF0000`,尽管整体 `primary_color` 使用蓝色调色板。您可以以这种方式设置主题中定义的任何 CSS 变量。 +您的 IDE 类型提示应该帮助您导航这些变量。由于有很多 CSS 变量,让我们看一下这些变量的命名和组织方式。 + +### CSS 变量命名规范 + +CSS 变量名可能会变得很长,例如 `button_primary_background_fill_hover_dark`!但是它们遵循一种常见的命名约定,使得理解变量功能和查找您要查找的变量变得容易。变量名由下划线分隔,由以下组成: + +1. 目标元素,例如 `button`、`slider` 或 `block`。2. 目标元素类型或子元素,例如 `button_primary` 或 `block_label`。3. 属性,例如 `button_primary_background_fill` 或 `block_label_border_width`。4. 任何相关状态,例如 `button_primary_background_fill_hover`。5. 如果在暗模式中值不同,则使用后缀 `_dark`。例如,`input_border_color_focus_dark`。 + 当然,许多 CSS 变量名都比这个短,例如 `table_border_color` 或 `input_shadow`。 + +### CSS 变量组织 + +虽然有数百个 CSS 变量,但并不需要为每个变量都指定单独的值。它们通过引用一组核心变量和彼此引用来获取值。这样做可以仅修改少量变量以改变整个主题的外观和感觉,同时也可以更精细地控制我们可能想要修改的个别元素。 + +#### 引用核心变量 + +要引用其中一个核心构造函数变量,请在变量名前加上星号。要引用核心颜色,请使用`*primary_`、`*secondary_` 或`*neutral_` 前缀,后跟亮度值。例如: + +```python +theme = gr.themes.Default(primary_hue="blue").set( + button_primary_background_fill="*primary_200", + button_primary_background_fill_hover="*primary_300", +) +``` + +在上面的示例中,我们将 `button_primary_background_fill` 和 `button_primary_background_fill_hover` 变量分别设置为`*primary_200` 和`*primary_300`。这些变量将分别设置为蓝色主色调调色板的 200 和 300 亮度值。 +同样地,要引用核心大小,请使用`*spacing_`、`*radius_` 或`*text_` 前缀,后跟大小值。例如: + +```python +theme = gr.themes.Default(radius_size="md").set( + button_primary_border_radius="*radius_xl", +) +``` + +在上面的示例中,我们将 `button_primary_border_radius` 变量设置为`*radius_xl`。此变量将设置为中等半径大小范围的 `xl` 设置。 + +#### 引用其他变量 + +变量也可以引用彼此。例如,请看下面的示例: + +```python +theme = gr.themes.Default().set( + button_primary_background_fill="#FF0000", + button_primary_background_fill_hover="#FF0000", + button_primary_border="#FF0000", +) +``` + +将这些值设置为相同的颜色有点繁琐。相反,我们可以在 
`button_primary_background_fill_hover` 和 `button_primary_border` 变量中使用`*` 前缀引用 `button_primary_background_fill` 变量。 + +```python +theme = gr.themes.Default().set( + button_primary_background_fill="#FF0000", + button_primary_background_fill_hover="*button_primary_background_fill", + button_primary_border="*button_primary_background_fill", +) +``` + +现在,如果我们更改 `button_primary_background_fill` 变量,`button_primary_background_fill_hover` 和 `button_primary_border` 变量将自动更新。 +如果您打算共享主题,这将非常有用- 它使得修改主题变得容易,而无需更改每个变量。 +请注意,暗模式变量自动相互引用。例如: + +```python +theme = gr.themes.Default().set( + button_primary_background_fill="#FF0000", + button_primary_background_fill_dark="#AAAAAA", + button_primary_border="*button_primary_background_fill", + button_primary_border_dark="*button_primary_background_fill_dark", +) +``` + +`button_primary_border_dark` 将从 `button_primary_background_fill_dark` 获取其值,因为暗模式总是使用变量的暗版本。 + +## 创建一个完整的主题 + +假设您想从头开始创建一个主题!我们将逐步进行 - 您还可以参考 gradio 源代码库中预构建主题的源代码,请看这里的示例:[Monochrome theme 的源代码](https://github.com/gradio-app/gradio/blob/main/gradio/themes/monochrome.py) +我们的新主题类将继承自 `gradio.themes.Base`,这是一个设置了许多方便默认值的主题。让我们创建一个名为 Seafoam 的简单演示,以及使用它的简单应用程序。 +$code_theme_new_step_1 + +
+ +
+ +Base 主题非常简洁,使用 `gr.themes.Blue` 作为其主要颜色-由于此原因,主按钮和加载动画都是蓝色的。让我们改变应用程序的默认核心参数。我们将覆盖构造函数并传递新的默认值给核心构造函数参数。 +我们将使用 `gr.themes.Emerald` 作为我们的主要颜色,并将次要和中性色调设置为 `gr.themes.Blue`。我们将使用 `text_lg` 使文本更大。我们将使用 `Quicksand` 作为我们的默认字体,从 Google Fonts 加载。 +$code_theme_new_step_2 + +
+ +
+ +注意到主按钮和加载动画现在是绿色的了吗?这些 CSS 变量与 `primary_hue` 相关联。 +我们来直接修改主题。我们将调用 `set()` 方法来明确覆盖 CSS 变量值。我们可以使用任何 CSS 逻辑,并使用`*` 前缀引用我们的核心构造函数的参数。 + +$code_theme_new_step_3 + +
+ +
+ +看看我们的主题现在多么有趣!仅通过几个变量的更改,我们的主题完全改变了。 + +您可能会发现探索[其他预建主题的源代码](https://github.com/gradio-app/gradio/blob/main/gradio/themes)会很有帮助,以了解他们如何修改基本主题。您还可以使用浏览器的检查工具,选择 UI 中的元素并查看在样式面板中使用的 CSS 变量。 + +## 分享主题 + +在创建主题后,您可以将其上传到 HuggingFace Hub,让其他人查看、使用和构建主题! + +### 上传主题 + +有两种上传主题的方式,通过主题类实例或命令行。我们将使用之前创建的“seafoam”主题来介绍这两种方式。 + +- 通过类实例 + +每个主题实例都有一个名为“push_to_hub”的方法,我们可以使用它来将主题上传到 HuggingFace Hub。 + +```python +seafoam.push_to_hub(repo_name="seafoam", + version="0.0.1", + hf_token="") +``` + +- 通过命令行 + +首先将主题保存到磁盘 + +```python +seafoam.dump(filename="seafoam.json") +``` + +然后使用“upload_theme”命令: + +```bash +upload_theme\ +"seafoam.json"\ +"seafoam"\ +--version "0.0.1"\ +--hf_token "" +``` + +要上传主题,您必须拥有一个 HuggingFace 账户,并通过 `hf_token` 参数传递您的[访问令牌](https://huggingface.co/docs/huggingface_hub/quick-start#login)。 +但是,如果您通过[HuggingFace 命令行](https://huggingface.co/docs/huggingface_hub/quick-start#login)登录(与 `gradio` 一起安装), +那么您可以省略 `hf_token` 参数。 + +`version` 参数允许您为主题指定一个有效的[语义版本](https://www.geeksforgeeks.org/introduction-semantic-versioning/)字符串。 +这样,您的用户就可以在他们的应用程序中指定要使用的主题版本。这还允许您发布主题更新而不必担心 +以前创建的应用程序的外观如何更改。`version` 参数是可选的。如果省略,下一个修订版本将自动应用。 + +### 主题预览 + +通过调用 `push_to_hub` 或 `upload_theme`,主题资源将存储在[HuggingFace 空间](https://huggingface.co/docs/hub/spaces-overview)中。 + +我们的 seafoam 主题的预览在这里:[seafoam 预览](https://huggingface.co/spaces/gradio/seafoam)。 + +
+ +
+ +
### 发现主题 + +[主题库](https://huggingface.co/spaces/gradio/theme-gallery)显示了所有公开的 gradio 主题。在发布主题之后, +它将在几分钟后自动显示在主题库中。 + +您可以按照空间上点赞的数量以及按创建时间从最新到最旧对主题进行排序,也可以在浅色和深色模式之间切换主题。 + +
+ +
+ +### 下载 + +要使用 Hub 中的主题,请在 `ThemeClass` 上使用 `from_hub` 方法,然后将其传递给您的应用程序: + +```python +my_theme = gr.Theme.from_hub("gradio/seafoam") + +with gr.Blocks(theme=my_theme) as demo: + .... +``` + +您也可以直接将主题字符串传递给 `Blocks` 或 `Interface`(`gr.Blocks(theme="gradio/seafoam")`) + +您可以通过使用语义版本表达式将您的应用程序固定到上游主题版本。 + +例如,以下内容将确保我们从“seafoam”仓库中加载的主题位于 `0.0.1` 和 `0.1.0` 版本之间: + +```python +with gr.Blocks(theme="gradio/seafoam@>=0.0.1,<0.1.0") as demo: + .... +``` + +享受创建自己的主题吧!如果您制作了一个自豪的主题,请将其上传到 Hub 与世界分享! +如果在[Twitter](https://twitter.com/gradio)上标记我们,我们可以给您的主题一个宣传! + + diff --git a/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/using-flagging.md b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/using-flagging.md new file mode 100644 index 0000000000000000000000000000000000000000..eca07fca43dbcb32c3eadbfa9780475759584fd3 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/07_other-tutorials/using-flagging.md @@ -0,0 +1,197 @@ +# 使用标记 + +相关空间:https://huggingface.co/spaces/gradio/calculator-flagging-crowdsourced, https://huggingface.co/spaces/gradio/calculator-flagging-options, https://huggingface.co/spaces/gradio/calculator-flag-basic +标签:标记,数据 + +## 简介 + +当您演示一个机器学习模型时,您可能希望收集试用模型的用户的数据,特别是模型行为不如预期的数据点。捕获这些“困难”数据点是有价值的,因为它允许您改进机器学习模型并使其更可靠和稳健。 + +Gradio 通过在每个“界面”中包含一个**标记**按钮来简化这些数据的收集。这使得用户或测试人员可以轻松地将数据发送回运行演示的机器。样本会保存在一个 CSV 日志文件中(默认情况下)。如果演示涉及图像、音频、视频或其他类型的文件,则这些文件会单独保存在一个并行目录中,并且这些文件的路径会保存在 CSV 文件中。 + +## 在 `gradio.Interface` 中使用**标记**按钮 + +使用 Gradio 的 `Interface` 进行标记特别简单。默认情况下,在输出组件下方有一个标记为**标记**的按钮。当用户测试您的模型时,如果看到有趣的输出,他们可以点击标记按钮将输入和输出数据发送回运行演示的机器。样本会保存在一个 CSV 日志文件中(默认情况下)。如果演示涉及图像、音频、视频或其他类型的文件,则这些文件会单独保存在一个并行目录中,并且这些文件的路径会保存在 CSV 文件中。 + +在 `gradio.Interface` 中有[四个参数](https://gradio.app/docs/#interface-header)控制标记的工作方式。我们将详细介绍它们。 + +- `allow_flagging`:此参数可以设置为 `"manual"`(默认值),`"auto"` 或 `"never"`。 + - `manual`:用户将看到一个标记按钮,只有在点击按钮时样本才会被标记。 + - `auto`:用户将不会看到一个标记按钮,但每个样本都会自动被标记。 + - `never`:用户将不会看到一个标记按钮,并且不会标记任何样本。 +- `flagging_options`:此参数可以是 
`None`(默认值)或字符串列表。 + - 如果是 `None`,则用户只需点击**标记**按钮,不会显示其他选项。 + - 如果提供了一个字符串列表,则用户会看到多个按钮,对应于提供的每个字符串。例如,如果此参数的值为`[" 错误 ", " 模糊 "]`,则会显示标记为**标记为错误**和**标记为模糊**的按钮。这仅适用于 `allow_flagging` 为 `"manual"` 的情况。 + - 所选选项将与输入和输出一起记录。 +- `flagging_dir`:此参数接受一个字符串。 + - 它表示标记数据存储的目录名称。 +- `flagging_callback`:此参数接受 `FlaggingCallback` 类的子类的实例 + - 使用此参数允许您编写在点击标记按钮时运行的自定义代码 + - 默认情况下,它设置为 `gr.CSVLogger` 的一个实例 + - 一个示例是将其设置为 `gr.HuggingFaceDatasetSaver` 的一个实例,这样您可以将任何标记的数据导入到 HuggingFace 数据集中(参见下文)。 + +## 标记的数据会发生什么? + +在 `flagging_dir` 参数提供的目录中,将记录标记的数据的 CSV 文件。 + +以下是一个示例:下面的代码创建了嵌入其中的计算器界面: + +```python +import gradio as gr + + +def calculator(num1, operation, num2): + if operation == "add": + return num1 + num2 + elif operation == "subtract": + return num1 - num2 + elif operation == "multiply": + return num1 * num2 + elif operation == "divide": + return num1 / num2 + + +iface = gr.Interface( + calculator, + ["number", gr.Radio(["add", "subtract", "multiply", "divide"]), "number"], + "number", + allow_flagging="manual" +) + +iface.launch() +``` + + + +当您点击上面的标记按钮时,启动界面的目录将包括一个新的标记子文件夹,其中包含一个 CSV 文件。该 CSV 文件包括所有被标记的数据。 + +```directory ++-- flagged/ +| +-- logs.csv +``` + +_flagged/logs.csv_ + +```csv +num1,operation,num2,Output,timestamp +5,add,7,12,2022-01-31 11:40:51.093412 +6,subtract,1.5,4.5,2022-01-31 03:25:32.023542 +``` + +如果界面涉及文件数据,例如图像和音频组件,还将创建文件夹来存储这些标记的数据。例如,将 `image` 输入到 `image` 输出界面将创建以下结构。 + +```directory ++-- flagged/ +| +-- logs.csv +| +-- image/ +| | +-- 0.png +| | +-- 1.png +| +-- Output/ +| | +-- 0.png +| | +-- 1.png +``` + +_flagged/logs.csv_ + +```csv +im,Output timestamp +im/0.png,Output/0.png,2022-02-04 19:49:58.026963 +im/1.png,Output/1.png,2022-02-02 10:40:51.093412 +``` + +如果您希望用户为标记提供一个原因,您可以将字符串列表传递给 Interface 的 `flagging_options` 参数。用户在标记时必须选择其中一项,选项将作为附加列保存在 CSV 文件中。 + +如果我们回到计算器示例,下面的代码将创建嵌入其中的界面。 + +```python +iface = gr.Interface( + calculator, + ["number", gr.Radio(["add", "subtract", "multiply", "divide"]), "number"], + "number", + 
allow_flagging="manual", + flagging_options=["wrong sign", "off by one", "other"] +) + +iface.launch() +``` + + + +当用户点击标记按钮时,CSV 文件现在将包括指示所选选项的列。 + +_flagged/logs.csv_ + +```csv +num1,operation,num2,Output,flag,timestamp +5,add,7,-12,wrong sign,2022-02-04 11:40:51.093412 +6,subtract,1.5,3.5,off by one,2022-02-04 11:42:32.062512 +``` + +## HuggingFaceDatasetSaver 回调 + +有时,将数据保存到本地 CSV 文件是不合理的。例如,在 Hugging Face Spaces 上 +,开发者通常无法访问托管 Gradio 演示的底层临时机器。这就是为什么,默认情况下,在 Hugging Face Space 中关闭标记的原因。然而, +您可能希望对标记的数据做其他处理。 +you may want to do something else with the flagged data. + +通过 `flagging_callback` 参数,我们使这变得非常简单。 + +例如,下面我们将会将标记的数据从我们的计算器示例导入到 Hugging Face 数据集中,以便我们可以构建一个“众包”数据集: + +```python +import os + +HF_TOKEN = os.getenv('HF_TOKEN') +hf_writer = gr.HuggingFaceDatasetSaver(HF_TOKEN, "crowdsourced-calculator-demo") + +iface = gr.Interface( + calculator, + ["number", gr.Radio(["add", "subtract", "multiply", "divide"]), "number"], + "number", + description="Check out the crowd-sourced dataset at: [https://huggingface.co/datasets/aliabd/crowdsourced-calculator-demo](https://huggingface.co/datasets/aliabd/crowdsourced-calculator-demo)", + allow_flagging="manual", + flagging_options=["wrong sign", "off by one", "other"], + flagging_callback=hf_writer +) + +iface.launch() +``` + +注意,我们使用我们的 Hugging Face 令牌和 +要保存样本的数据集的名称,定义了我们自己的 +`gradio.HuggingFaceDatasetSaver` 的实例。此外,我们还将 `allow_flagging="manual"` 设置为了 +,因为在 Hugging Face Spaces 中,`allow_flagging` 默认设置为 `"never"`。这是我们的演示: + + + +您现在可以在这个[公共的 Hugging Face 数据集](https://huggingface.co/datasets/aliabd/crowdsourced-calculator-demo)中看到上面标记的所有示例。 + +![flagging callback hf](/assets/guides/flagging-callback-hf.png) + +我们创建了 `gradio.HuggingFaceDatasetSaver` 类,但只要它继承自[此文件](https://github.com/gradio-app/gradio/blob/master/gradio/flagging.py)中定义的 `FlaggingCallback`,您可以传递自己的自定义类。如果您创建了一个很棒的回调,请将其贡献给该存储库! 
+ +
## 使用 Blocks 进行标记 + +如果您正在使用 `gradio.Blocks`,又该怎么办呢?一方面,使用 Blocks 您拥有更多的灵活性 +--您可以编写任何您想在按钮被点击时运行的 Python 代码, +并使用 Blocks 中的内置事件分配它。 + +同时,您可能希望使用现有的 `FlaggingCallback` 来避免编写额外的代码。 +这需要两个步骤: + +1. 您必须在代码中的某个位置运行您的回调的 `.setup()` 方法 + 在第一次标记数据之前 +2. 当点击标记按钮时,您触发回调的 `.flag()` 方法, + 确保正确收集参数并禁用通常的预处理。 + +下面是一个使用默认的 `CSVLogger` 标记图像怀旧滤镜 Blocks 演示的示例: + +$code_blocks_flag +$demo_blocks_flag + +## 隐私 + +重要提示:请确保用户了解他们提交的数据何时被保存以及您计划如何处理它。当您使用 `allow_flagging=auto`(当通过演示提交的所有数据都被标记时),这一点尤为重要。 + +### 这就是全部!祝您建设愉快 :) diff --git a/testbed/gradio-app__gradio/guides/cn/CONTRIBUTING.md b/testbed/gradio-app__gradio/guides/cn/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..d43e06dbbf7cc172ac23b41c98b8a7be0f241349 --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/CONTRIBUTING.md @@ -0,0 +1,30 @@ +# Contributing a Guide + +Want to help teach Gradio? Consider contributing a Guide! 🤗 + +Broadly speaking, there are two types of guides: + +- **Use cases**: guides that cover step-by-step how to build a particular type of machine learning demo or app using Gradio. Here's an example: [_Creating a Chatbot_](https://github.com/gradio-app/gradio/blob/master/guides/creating_a_chatbot.md) +- **Feature explanation**: guides that describe in detail a particular feature of Gradio. Here's an example: [_Using Flagging_](https://github.com/gradio-app/gradio/blob/master/guides/using_flagging.md) + +We encourage you to submit either type of Guide! (Looking for ideas? We may also have open [issues](https://github.com/gradio-app/gradio/issues?q=is%3Aopen+is%3Aissue+label%3Aguides) where users have asked for guides on particular topics) + +## Guide Structure + +As you can see with the previous examples, Guides are standard markdown documents. 
They usually: + +- start with an Introduction section describing the topic +- include subheadings to make articles easy to navigate +- include real code snippets that make it easy to follow along and implement the Guide +- include embedded Gradio demos to make them more interactive and provide immediate demonstrations of the topic being discussed. These Gradio demos are hosted on [Hugging Face Spaces](https://huggingface.co/spaces) and are embedded using the standard \ tag. + +## How to Contribute a Guide + +1. Clone or fork this `gradio` repo +2. Add a new markdown document with a descriptive title to the `/guides` folder +3. Write your Guide in standard markdown! Embed Gradio demos wherever helpful +4. Add a list of `related_spaces` at the top of the markdown document (see the previously linked Guides for how to do this) +5. Add 3 `tags` at the top of the markdown document to help users find your guide (again, see the previously linked Guides for how to do this) +6. Open a PR to have your guide reviewed + +That's it! 
We're looking forward to reading your Guide 🥳 diff --git a/testbed/gradio-app__gradio/guides/cn/assets/sharing.svg b/testbed/gradio-app__gradio/guides/cn/assets/sharing.svg new file mode 100644 index 0000000000000000000000000000000000000000..334e0d011e3d87e37d82368808fe6dad9c41766d --- /dev/null +++ b/testbed/gradio-app__gradio/guides/cn/assets/sharing.svg @@ -0,0 +1,487 @@ + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + lion + lion + HOST + REMOTE USERS + diff --git a/testbed/gradio-app__gradio/js/_cdn-test/favicon.svg b/testbed/gradio-app__gradio/js/_cdn-test/favicon.svg new file mode 100644 index 0000000000000000000000000000000000000000..de4aeddc12bdfe6c668dd57c6b457a659c058914 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_cdn-test/favicon.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_cdn-test/index.html b/testbed/gradio-app__gradio/js/_cdn-test/index.html new file mode 100644 index 0000000000000000000000000000000000000000..e6db963a67588a4d80b5cb9f8f4bc91907cdf467 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_cdn-test/index.html @@ -0,0 +1,320 @@ + + + + + + + + + + + + + Vite App + + + + + + + + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_cdn-test/package.json b/testbed/gradio-app__gradio/js/_cdn-test/package.json new file mode 100644 index 0000000000000000000000000000000000000000..11f020bc43fddfc04113ed564545bc53948b6d7d --- /dev/null +++ b/testbed/gradio-app__gradio/js/_cdn-test/package.json @@ -0,0 +1,13 @@ +{ + "name": "@gradio/cdn-test", + "private": true, + "version": "0.0.0", + "scripts": { + "dev": "vite --port 3001", + "build": "vite build", + "preview": "vite preview --port 3001" + }, + "devDependencies": { + "vite": "^4.0.0" + } +} diff --git 
a/testbed/gradio-app__gradio/js/_cdn-test/style.css b/testbed/gradio-app__gradio/js/_cdn-test/style.css new file mode 100644 index 0000000000000000000000000000000000000000..5ad8c6b5bee5a4e888a3eb7b7cdda8416f95c20c --- /dev/null +++ b/testbed/gradio-app__gradio/js/_cdn-test/style.css @@ -0,0 +1,8 @@ +#app { + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + margin-top: 60px; + color: #2c3e50; + font-family: Avenir, Helvetica, Arial, sans-serif; + text-align: center; +} diff --git a/testbed/gradio-app__gradio/js/_spaces-test/.gitignore b/testbed/gradio-app__gradio/js/_spaces-test/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..6635cf5542756197081eedaa1ec3a7c2c5a0b537 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/.gitignore @@ -0,0 +1,10 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example +vite.config.js.timestamp-* +vite.config.ts.timestamp-* diff --git a/testbed/gradio-app__gradio/js/_spaces-test/.npmrc b/testbed/gradio-app__gradio/js/_spaces-test/.npmrc new file mode 100644 index 0000000000000000000000000000000000000000..0c05da457e450c0a6fafe36006e17fa39abc899b --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/.npmrc @@ -0,0 +1,2 @@ +engine-strict=true +resolution-mode=highest diff --git a/testbed/gradio-app__gradio/js/_spaces-test/.prettierignore b/testbed/gradio-app__gradio/js/_spaces-test/.prettierignore new file mode 100644 index 0000000000000000000000000000000000000000..38972655faff07d2cc0383044bbf9f43b22c2248 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/.prettierignore @@ -0,0 +1,13 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example + +# Ignore files for PNPM, NPM and YARN +pnpm-lock.yaml +package-lock.json +yarn.lock diff --git a/testbed/gradio-app__gradio/js/_spaces-test/README.md b/testbed/gradio-app__gradio/js/_spaces-test/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..5c91169b0ca6508bb24301c957a9edea5abf2b01 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/README.md @@ -0,0 +1,38 @@ +# create-svelte + +Everything you need to build a Svelte project, powered by [`create-svelte`](https://github.com/sveltejs/kit/tree/master/packages/create-svelte). + +## Creating a project + +If you're seeing this, you've probably already done this step. Congrats! + +```bash +# create a new project in the current directory +npm create svelte@latest + +# create a new project in my-app +npm create svelte@latest my-app +``` + +## Developing + +Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server: + +```bash +npm run dev + +# or start the server and open the app in a new browser tab +npm run dev -- --open +``` + +## Building + +To create a production version of your app: + +```bash +npm run build +``` + +You can preview the production build with `npm run preview`. + +> To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment. 
diff --git a/testbed/gradio-app__gradio/js/_spaces-test/jsconfig.json b/testbed/gradio-app__gradio/js/_spaces-test/jsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..fe45e13fdd06dba2006f36965786f3b32e3cadf7 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/jsconfig.json @@ -0,0 +1,17 @@ +{ + "extends": "./.svelte-kit/tsconfig.json", + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "sourceMap": true, + "strict": true + } + // Path aliases are handled by https://kit.svelte.dev/docs/configuration#alias and https://kit.svelte.dev/docs/configuration#files + // + // If you want to overwrite includes/excludes, make sure to copy over the relevant includes/excludes + // from the referenced tsconfig.json - TypeScript does not merge them in +} diff --git a/testbed/gradio-app__gradio/js/_spaces-test/package.json b/testbed/gradio-app__gradio/js/_spaces-test/package.json new file mode 100644 index 0000000000000000000000000000000000000000..b35adc30991c0d799df8f118ef302cb939c8d213 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/package.json @@ -0,0 +1,30 @@ +{ + "name": "@gradio/spaces-test", + "version": "0.0.1", + "private": true, + "scripts": { + "dev": "vite dev", + "build": "vite build", + "preview": "vite preview", + "check": "svelte-kit sync && svelte-check --tsconfig ./jsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./jsconfig.json --watch", + "lint": "prettier --plugin-search-dir . --check .", + "format": "prettier --plugin-search-dir . --write ." 
+ }, + "devDependencies": { + "@sveltejs/adapter-auto": "^2.0.0", + "@sveltejs/kit": "^1.5.0", + "prettier": "^3.0.0", + "prettier-plugin-svelte": "^3.0.0", + "svelte": "^3.54.0", + "svelte-check": "^3.0.1", + "typescript": "^5.0.0", + "vite": "^4.3.0" + }, + "type": "module", + "dependencies": { + "@gradio/client": "workspace:^", + "@gradio/form": "workspace:^", + "@gradio/theme": "workspace:^" + } +} diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/app.d.ts b/testbed/gradio-app__gradio/js/_spaces-test/src/app.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..f59b884c51ed3c31fc0738fd38d0d75b580df5e4 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/app.d.ts @@ -0,0 +1,12 @@ +// See https://kit.svelte.dev/docs/types#app +// for information about these interfaces +declare global { + namespace App { + // interface Error {} + // interface Locals {} + // interface PageData {} + // interface Platform {} + } +} + +export {}; diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/app.html b/testbed/gradio-app__gradio/js/_spaces-test/src/app.html new file mode 100644 index 0000000000000000000000000000000000000000..77ec85d79aaca6e8313b256abf368b912e486b4d --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/app.html @@ -0,0 +1,12 @@ + + + + + + + %sveltekit.head% + + +
%sveltekit.body%
+ + diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/lib/EndpointInputs.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/EndpointInputs.svelte new file mode 100644 index 0000000000000000000000000000000000000000..ae9833040f8a5072b2f9844a61df272e0e85f080 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/EndpointInputs.svelte @@ -0,0 +1,88 @@ + + +

Request Inputs

+ +{#each app_info as { type, label, component }, i} + {#if type === "string"} + + {:else if type === "number"} + + {:else if type === "boolean"} + + {:else if type === "number"} + + {:else if type === "string[]"} + + {:else if ["Image", "Audio", "Video"].includes(component)} + + {/if} +{/each} + + diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/lib/ResponsePreview.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/ResponsePreview.svelte new file mode 100644 index 0000000000000000000000000000000000000000..61114c79190667dd711596d970fdccb9a8b3fe33 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/ResponsePreview.svelte @@ -0,0 +1,90 @@ + + +
+
+

Response Outputs

+ {#if status === "pending" || status === "generating"} + + {:else if status === "error"} + + {:else if status === "complete"} + + {/if} +
+ {#each app_info as { type, label, component }, i} + {#if type === "string"} + + {:else if type === "number"} + + {:else if type === "boolean"} + + {:else if type === "number"} + + {:else if type === "string[]"} + + {/if} + {/each} + +

JSON

+
{JSON.stringify(
+				response_data.data.length ? response_data : {},
+				null,
+				2
+			)}
+
+ + diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/lib/Spinner.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/Spinner.svelte new file mode 100644 index 0000000000000000000000000000000000000000..3c402f355c0a133f82da77a0d40ae14ac34698d4 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/Spinner.svelte @@ -0,0 +1,43 @@ + + + diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/lib/Success.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/Success.svelte new file mode 100644 index 0000000000000000000000000000000000000000..abe7c7f4087bce6a98c749f558447fd839a4a6a3 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/Success.svelte @@ -0,0 +1,27 @@ + + + diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/lib/Warning.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/Warning.svelte new file mode 100644 index 0000000000000000000000000000000000000000..41f88667da71c100c6037d8b534f1fcf8c54be9a --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/lib/Warning.svelte @@ -0,0 +1,30 @@ + + + diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/routes/+layout.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/+layout.svelte new file mode 100644 index 0000000000000000000000000000000000000000..361ac4ccca02441e08525d3d97e0e1b900821e03 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/+layout.svelte @@ -0,0 +1,71 @@ + + + + + + + + + +
+

Gradio Test Space

+
    + {#each links as [url, name]} +
  • {name}
  • + {/each} +
+ +
+ + diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/routes/+page.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/+page.svelte new file mode 100644 index 0000000000000000000000000000000000000000..d393e37532c3125eea121cf058f4851045d0d56f --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/+page.svelte @@ -0,0 +1 @@ +

Embeds

diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/routes/client-browser/+page.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/client-browser/+page.svelte new file mode 100644 index 0000000000000000000000000000000000000000..55e8bd101f447cbf20c1f141765bb820ceb6779c --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/client-browser/+page.svelte @@ -0,0 +1,309 @@ + + +

Client Browser

+ +

+ Enter a space user-space/name to test the client in a browser environment + with any space. +

+

+ You may optionally provide a hf_token to test a private space +

+ +
+ + + +
+ + + +{#if named.length || unnamed.length} +
+
+

Named endpoints

+ {#if named.length} + {#each named as endpoint} + + {/each} + {:else} +

There are no named endpoints

+ {/if} +
+ +
+

Unnamed endpoints

+ + {#if unnamed.length} + {#each unnamed as endpoint} + + {/each} + {:else} +

There are no unnamed endpoints

+ {/if} +
+
+{/if} + +{#if app_info} +
+

+ This endpoint accepts {app_info.parameters.length + ? app_info.parameters.length + : "no"} piece{app_info.parameters.length < 1 || + app_info.parameters.length > 1 + ? "s" + : ""} of data and returns {app_info.returns.length + ? app_info.returns.length + : "no"} piece{app_info.returns.length < 1 || app_info.returns.length > 1 + ? "s" + : ""} of data. {endpoint_type_text} +

+
+
+
+ + + {#if app_info.type.generator || app_info.type.continuous} + + {/if} +
+
+ +
+
+{/if} + + diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/routes/client-node/+page.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/client-node/+page.svelte new file mode 100644 index 0000000000000000000000000000000000000000..690c371cf19abc343c717da635e21be83fdb1783 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/client-node/+page.svelte @@ -0,0 +1,3 @@ +

Client Node

+ +

coming soon.

diff --git a/testbed/gradio-app__gradio/js/_spaces-test/src/routes/embeds/+page.svelte b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/embeds/+page.svelte new file mode 100644 index 0000000000000000000000000000000000000000..3d16c48b2757895e5003fafcdf1512e53c83787c --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/src/routes/embeds/+page.svelte @@ -0,0 +1,3 @@ +

Embeds

+ +

Coming soon.

diff --git a/testbed/gradio-app__gradio/js/_spaces-test/svelte.config.js b/testbed/gradio-app__gradio/js/_spaces-test/svelte.config.js new file mode 100644 index 0000000000000000000000000000000000000000..0aee7cd5d15b21ae5500a13ce45ae98fd115a04a --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/svelte.config.js @@ -0,0 +1,13 @@ +import adapter from "@sveltejs/adapter-auto"; + +/** @type {import('@sveltejs/kit').Config} */ +const config = { + kit: { + // adapter-auto only supports some environments, see https://kit.svelte.dev/docs/adapter-auto for a list. + // If your environment is not supported or you settled on a specific environment, switch out the adapter. + // See https://kit.svelte.dev/docs/adapters for more information about adapters. + adapter: adapter() + } +}; + +export default config; diff --git a/testbed/gradio-app__gradio/js/_spaces-test/vite.config.js b/testbed/gradio-app__gradio/js/_spaces-test/vite.config.js new file mode 100644 index 0000000000000000000000000000000000000000..328b1f5f9ad6758dccef48ff9ff3cafc12d203cd --- /dev/null +++ b/testbed/gradio-app__gradio/js/_spaces-test/vite.config.js @@ -0,0 +1,6 @@ +import { sveltekit } from "@sveltejs/kit/vite"; +import { defineConfig } from "vite"; + +export default defineConfig({ + plugins: [sveltekit()] +}); diff --git a/testbed/gradio-app__gradio/js/_website/.gitignore b/testbed/gradio-app__gradio/js/_website/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..d9c05763334c4dfec921a02904ba877016c5f849 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/.gitignore @@ -0,0 +1,10 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example +src/lib/json/ +.vercel diff --git a/testbed/gradio-app__gradio/js/_website/.npmrc b/testbed/gradio-app__gradio/js/_website/.npmrc new file mode 100644 index 0000000000000000000000000000000000000000..b6f27f135954640c8cc5bfd7b8c9922ca6eb2aad --- /dev/null +++ 
b/testbed/gradio-app__gradio/js/_website/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/testbed/gradio-app__gradio/js/_website/CHANGELOG.md b/testbed/gradio-app__gradio/js/_website/CHANGELOG.md new file mode 100644 index 0000000000000000000000000000000000000000..e09961db6f7c43d49ea75ef3fe9b77b19dd977af --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/CHANGELOG.md @@ -0,0 +1,102 @@ +# website + +## 0.7.0 + +### Features + +- [#5643](https://github.com/gradio-app/gradio/pull/5643) [`f661c0733`](https://github.com/gradio-app/gradio/commit/f661c0733b501f1a54a0c62af2567909c7202944) - Add the brand assets page to the website. Thanks [@whitphx](https://github.com/whitphx)! +- [#5675](https://github.com/gradio-app/gradio/pull/5675) [`b619e6f6e`](https://github.com/gradio-app/gradio/commit/b619e6f6e4ca55334fb86da53790e45a8f978566) - Reorganize Docs Navbar and Fill in Gaps. Thanks [@aliabd](https://github.com/aliabd)! +- [#5669](https://github.com/gradio-app/gradio/pull/5669) [`c5e969559`](https://github.com/gradio-app/gradio/commit/c5e969559612f956afcdb0c6f7b22ab8275bc49a) - Fix small issues in docs and guides. Thanks [@aliabd](https://github.com/aliabd)! + +### Fixes + +- [#5608](https://github.com/gradio-app/gradio/pull/5608) [`eebf9d71f`](https://github.com/gradio-app/gradio/commit/eebf9d71f90a83bd84b62c855fdcd13b086f7ad5) - Styling fixes to guides. Thanks [@aliabd](https://github.com/aliabd)! + +## 0.6.0 + +### Features + +- [#5565](https://github.com/gradio-app/gradio/pull/5565) [`f0514fc49`](https://github.com/gradio-app/gradio/commit/f0514fc49ea04ba01dce748238e1fd16f9cb5d8b) - Route docs and guide urls correctly. Thanks [@aliabd](https://github.com/aliabd)! + +## 0.5.0 + +### Features + +- [#5481](https://github.com/gradio-app/gradio/pull/5481) [`df623e74`](https://github.com/gradio-app/gradio/commit/df623e743aad4b21a7eda9bae4c03eb17f01c90d) - Toggle main vs versioned demos on website and show install snippet. 
Thanks [@aliabd](https://github.com/aliabd)! + +## 0.4.0 + +### Features + +- [#5423](https://github.com/gradio-app/gradio/pull/5423) [`bb31cd7d`](https://github.com/gradio-app/gradio/commit/bb31cd7dd0dc60c18b2b21269512775f3784ef01) - Remove stable diffusion demo from landing page. Thanks [@aliabd](https://github.com/aliabd)! + +## 0.3.0 + +### Features + +- [#5271](https://github.com/gradio-app/gradio/pull/5271) [`97c3c7b1`](https://github.com/gradio-app/gradio/commit/97c3c7b1730407f9e80566af9ecb4ca7cccf62ff) - Move scripts from old website to CI. Thanks [@aliabd](https://github.com/aliabd)! +- [#5381](https://github.com/gradio-app/gradio/pull/5381) [`3d66e61d`](https://github.com/gradio-app/gradio/commit/3d66e61d641da8ca2a7d10c545c7dc0139697f00) - chore(deps): update dependency hast-util-to-string to v3. Thanks [@renovate](https://github.com/apps/renovate)! + +### Fixes + +- [#5304](https://github.com/gradio-app/gradio/pull/5304) [`05892302`](https://github.com/gradio-app/gradio/commit/05892302fb8fe2557d57834970a2b65aea97355b) - Adds kwarg to disable html sanitization in `gr.Chatbot()`. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)! + +## 0.2.2 + +### Features + +- [#5284](https://github.com/gradio-app/gradio/pull/5284) [`5f25eb68`](https://github.com/gradio-app/gradio/commit/5f25eb6836f6a78ce6208b53495a01e1fc1a1d2f) - Minor bug fix sweep. Thanks [@aliabid94](https://github.com/aliabid94)!/n - Our use of __exit__ was catching errors and corrupting the traceback of any component that failed to instantiate (try running blocks_kitchen_sink off main for an example). Now the __exit__ exits immediately if there's been an exception, so the original exception can be printed cleanly/n - HighlightedText was rendering weird, cleaned it up + +## 0.2.1 + +### Fixes + +- [#5324](https://github.com/gradio-app/gradio/pull/5324) [`31996c99`](https://github.com/gradio-app/gradio/commit/31996c991d6bfca8cef975eb8e3c9f61a7aced19) - ensure login form has correct styles. 
Thanks [@pngwn](https://github.com/pngwn)! + +## 0.2.0 + +### Highlights + +#### Improve startup performance and markdown support ([#5279](https://github.com/gradio-app/gradio/pull/5279) [`fe057300`](https://github.com/gradio-app/gradio/commit/fe057300f0672c62dab9d9b4501054ac5d45a4ec)) + +##### Improved markdown support + +We now have better support for markdown in `gr.Markdown` and `gr.Dataframe`. Including syntax highlighting and Github Flavoured Markdown. We also have more consistent markdown behaviour and styling. + +##### Various performance improvements + +These improvements will be particularly beneficial to large applications. + +- Rather than attaching events manually, they are now delegated, leading to a significant performance improvement and addressing a performance regression introduced in a recent version of Gradio. App startup for large applications is now around twice as fast. +- Optimised the mounting of individual components, leading to a modest performance improvement during startup (~30%). +- Corrected an issue that was causing markdown to re-render infinitely. +- Ensured that the `gr.3DModel` does re-render prematurely. + + Thanks [@pngwn](https://github.com/pngwn)! + +### Features + +- [#5298](https://github.com/gradio-app/gradio/pull/5298) [`cf167cd1`](https://github.com/gradio-app/gradio/commit/cf167cd1dd4acd9aee225ff1cb6fac0e849806ba) - Create event listener table for components on docs. Thanks [@aliabd](https://github.com/aliabd)! +- [#5092](https://github.com/gradio-app/gradio/pull/5092) [`643442e1`](https://github.com/gradio-app/gradio/commit/643442e1a5e25fc0c89a15a38b6279b8955643ac) - generate docs json in ci, reimplement main vs release. Thanks [@pngwn](https://github.com/pngwn)! +- [#5186](https://github.com/gradio-app/gradio/pull/5186) [`24b66e1c`](https://github.com/gradio-app/gradio/commit/24b66e1cff0452bce71c71cea1b818913aeb8d51) - homepage demo update. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! 
+ +## 0.1.0 + +### Features + +- [#5076](https://github.com/gradio-app/gradio/pull/5076) [`2745075a`](https://github.com/gradio-app/gradio/commit/2745075a26f80e0e16863d483401ff1b6c5ada7a) - Add deploy_discord to docs. Thanks [@freddyaboulton](https://github.com/freddyaboulton)! + +### Fixes + +- [#5111](https://github.com/gradio-app/gradio/pull/5111) [`b84a35b7`](https://github.com/gradio-app/gradio/commit/b84a35b7b91eca947f787648ceb361b1d023427b) - Add icon and link to DuplicateButton. Thanks [@aliabd](https://github.com/aliabd)! +- [#5037](https://github.com/gradio-app/gradio/pull/5037) [`42488c07`](https://github.com/gradio-app/gradio/commit/42488c076aaf3ac2302b27760773a87f5b6ecc41) - Correct gradio version on website. Thanks [@aliabd](https://github.com/aliabd)! + +## 0.0.2 + +### Features + +- [#5009](https://github.com/gradio-app/gradio/pull/5009) [`3e70fc81`](https://github.com/gradio-app/gradio/commit/3e70fc81fc12dcb07f40a280b972a61348c9d263) - Correctly render changelog on website after new formatting. Thanks [@aliabd](https://github.com/aliabd)! + +### Fixes + +- [#5007](https://github.com/gradio-app/gradio/pull/5007) [`71c90394`](https://github.com/gradio-app/gradio/commit/71c90394012a9cfe10eae312b437a6deff52da3a) - Make sure tags aren't rendered inside a guide. Thanks [@aliabd](https://github.com/aliabd)! \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/README.md b/testbed/gradio-app__gradio/js/_website/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5c91169b0ca6508bb24301c957a9edea5abf2b01 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/README.md @@ -0,0 +1,38 @@ +# create-svelte + +Everything you need to build a Svelte project, powered by [`create-svelte`](https://github.com/sveltejs/kit/tree/master/packages/create-svelte). + +## Creating a project + +If you're seeing this, you've probably already done this step. Congrats! 
+ +```bash +# create a new project in the current directory +npm create svelte@latest + +# create a new project in my-app +npm create svelte@latest my-app +``` + +## Developing + +Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server: + +```bash +npm run dev + +# or start the server and open the app in a new browser tab +npm run dev -- --open +``` + +## Building + +To create a production version of your app: + +```bash +npm run build +``` + +You can preview the production build with `npm run preview`. + +> To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment. diff --git a/testbed/gradio-app__gradio/js/_website/generate_jsons/generate.py b/testbed/gradio-app__gradio/js/_website/generate_jsons/generate.py new file mode 100644 index 0000000000000000000000000000000000000000..79828262d7f683b7f76a467225ef750caade20ef --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/generate_jsons/generate.py @@ -0,0 +1,40 @@ +import json +import os +from subprocess import run + +from src import changelog, demos, docs, guides + +WEBSITE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) +GRADIO_DIR = os.path.abspath(os.path.join(WEBSITE_DIR, "..", "..", "gradio")) + +def make_dir(root, path): + return os.path.abspath(os.path.join(root, path)) + +def get_latest_release(): + with open(make_dir(GRADIO_DIR, "package.json")) as f: + version = json.load(f)["version"] + with open(make_dir(WEBSITE_DIR, "src/lib/json/version.json"), "w+") as j: + json.dump({ + "version": version + }, j) + with open(make_dir(WEBSITE_DIR, "src/lib/json/wheel.json"), "w+") as j: + sha = run(["git", "log", "-1", "--format='%H'"], capture_output=True).stdout.decode("utf-8").strip("'\n") + json.dump({ + "wheel": f"https://gradio-builds.s3.amazonaws.com/{sha}/gradio-{version}-py3-none-any.whl" + }, j) + + +def create_dir_if_not_exists(path): + if 
not os.path.exists(path): + os.makedirs(path) + +create_dir_if_not_exists(make_dir(WEBSITE_DIR, "src/lib/json")) +create_dir_if_not_exists(make_dir(WEBSITE_DIR, "src/lib/json/guides")) + +demos.generate(make_dir(WEBSITE_DIR, "src/lib/json/demos.json")) +guides.generate(make_dir(WEBSITE_DIR, "src/lib/json/guides/") + "/") +docs.generate(make_dir(WEBSITE_DIR, "src/lib/json/docs.json")) +changelog.generate(make_dir(WEBSITE_DIR, "src/lib/json/changelog.json")) +get_latest_release() + +print("JSON generated! " + make_dir(WEBSITE_DIR, "src/lib/json/")) \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/generate_jsons/src/changelog/__init__.py b/testbed/gradio-app__gradio/js/_website/generate_jsons/src/changelog/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bd16053436094ab643892af604e4a350ea38a877 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/generate_jsons/src/changelog/__init__.py @@ -0,0 +1,25 @@ +import json +import os +import re + +DIR = os.path.dirname(__file__) +CHANGELOG_FILE = os.path.abspath(os.path.join(DIR, "../../../../../CHANGELOG.md")) + + +def clean(): + with open(CHANGELOG_FILE) as change_file: + content = change_file.read() + + # remove empty/unused sections + content = re.sub(r"## [\w^:\n ]*No changes to highlight.", "", content) + content = content.replace("# gradio", "# Changelog") + + return content + + +def generate(json_path): + content = clean() + with open(json_path, 'w+') as f: + json.dump({ + "content": content, + }, f) \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/generate_jsons/src/demos/__init__.py b/testbed/gradio-app__gradio/js/_website/generate_jsons/src/demos/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c0595c6d660377b58f2309a1d1ef5a6fc579a43d --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/generate_jsons/src/demos/__init__.py @@ -0,0 +1,147 @@ +import json +import os + +DIR = 
os.path.dirname(__file__) +GRADIO_DEMO_DIR = os.path.abspath(os.path.join(DIR, "../../../../../demo/")) + +def get_code_and_description(demo_name): + with open(os.path.join(GRADIO_DEMO_DIR, demo_name, "run.py")) as f: + code = f.read() + with open(os.path.join(GRADIO_DEMO_DIR, demo_name, "DESCRIPTION.md")) as f: + description = f.read() + return code, description + + +demos_by_category = [ + { + "category": "🖊️ Text & Natural Language Processing", + "demos": [ + { + "name": "Hello World", + "dir": "hello_world", + }, + { + "name": "Text Generation", + "dir": "text_generation", + }, + { + "name": "Autocomplete", + "dir": "autocomplete", + }, + { + "name": "Sentiment Analysis", + "dir": "sentiment_analysis", + }, + { + "name": "Named Entity Recognition", + "dir": "text_analysis", + }, + { + "name": "Multilingual Translation", + "dir": "translation", + } + + ] + }, + { + "category": "🖼️ Images & Computer Vision", + "demos": [ + { + "name": "Image Classification", + "dir": "image_classification", + }, + { + "name": "Image Segmentation", + "dir": "image_segmentation", + }, + { + "name": "Image Transformation with AnimeGAN", + "dir": "animeganv2", + }, + { + "name": "Image Generation (Fake GAN)", + "dir": "fake_gan", + }, + { + "name": "Iterative Output", + "dir": "fake_diffusion", + }, + { + "name": "3D Models", + "dir": "depth_estimation", + }, + ] + }, + { + "category": "📈 Tabular Data & Plots", + "demos": [ + { + "name": "Interactive Dashboard", + "dir": "dashboard" + }, + { + "name": "Dashboard with Live Updates", + "dir": "live_dashboard" + }, + { + "name": "Interactive Map of AirBnB Locations", + "dir": "map_airbnb" + }, + { + "name": "Outbreak Forecast", + "dir": "outbreak_forecast", + }, + { + "name": "Clustering with Scikit-Learn", + "dir": "clustering", + }, + { + "name": "Time Series Forecasting", + "dir": "timeseries-forecasting-with-prophet", + }, + { + "name": "Income Classification with XGBoost", + "dir": "xgboost-income-prediction-with-explainability", + 
}, + { + "name": "Leaderboard", + "dir": "leaderboard", + }, + { + "name": "Tax Calculator", + "dir": "tax_calculator", + }, + ] + }, + { + "category": "🎤 Audio & Speech", + "demos": [ + { + "name": "Text to Speech", + "dir": "neon-tts-plugin-coqui", + }, + { + "name": "Speech to Text (ASR)", + "dir": "automatic-speech-recognition", + }, + { + "name": "Musical Instrument Identification", + "dir": "musical_instrument_identification", + }, + { + "name": "Speaker Verification", + "dir": "same-person-or-different", + }, + ] + }, +] + +for category in demos_by_category: + for demo in category["demos"]: + code, description = get_code_and_description(demo["dir"]) + demo["code"] = code + demo["text"] = description + +def generate(json_path): + with open(json_path, 'w+') as f: + json.dump(demos_by_category, f) diff --git a/testbed/gradio-app__gradio/js/_website/generate_jsons/src/docs/__init__.py b/testbed/gradio-app__gradio/js/_website/generate_jsons/src/docs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e2170e3a0ce29571df9f1cab453b6db9574d98ed --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/generate_jsons/src/docs/__init__.py @@ -0,0 +1,320 @@ +import json +import os + +from gradio_client.documentation import document_cls, generate_documentation + +from gradio.events import EventListener, EventListenerMethod + +from ..guides import guides + +DIR = os.path.dirname(__file__) +DEMOS_DIR = os.path.abspath(os.path.join(DIR, "../../../../../demo")) +JS_CLIENT_README = os.path.abspath(os.path.join(DIR, "../../../../../client/js/README.md")) + +docs = generate_documentation() +docs["component"].sort(key=lambda x: x["name"]) + + +def add_component_shortcuts(): + for component in docs["component"]: + if not getattr(component["class"], "allow_string_shortcut", True): + continue + component["string_shortcuts"] = [ + ( + component["class"].__name__, + component["name"].lower(), + "Uses default values", + ) + ] + for subcls in 
component["class"].__subclasses__(): + if getattr(subcls, "is_template", False): + _, tags, _ = document_cls(subcls) + component["string_shortcuts"].append( + ( + subcls.__name__, + subcls.__name__.lower(), + "Uses " + tags.get("sets", "default values"), + ) + ) + + +add_component_shortcuts() + + +def add_demos(): + for mode in docs: + for cls in docs[mode]: + if "demos" not in cls["tags"]: + continue + cls["demos"] = [] + demos = [demo.strip() for demo in cls["tags"]["demos"].split(",")] + for demo in demos: + demo_file = os.path.join(DEMOS_DIR, demo, "run.py") + with open(demo_file) as run_py: + demo_code = run_py.read() + cls["demos"].append((demo, demo_code)) + + +add_demos() + +def create_events_matrix(): + events = [] + for c in EventListener.__subclasses__(): + methods = c().__dict__ + for m in methods: + if m[:1] != '_' and isinstance(methods[m], EventListenerMethod) and m not in events: + events.append(m) + component_events = {} + for component in docs["component"]: + component_event_list = [] + for event in events: + for fn in component["fns"]: + if event == fn["name"]: + component_event_list.append(event) + component_events[component["name"]] = component_event_list + + + return events, component_events + +events, component_events = create_events_matrix() + + +def add_guides(): + for mode in docs: + for cls in docs[mode]: + if "guides" not in cls["tags"]: + continue + cls["guides"] = [] + docstring_guides = [ + guide.strip() for guide in cls["tags"]["guides"].split(",") + ] + for docstring_guide in docstring_guides: + for guide in guides: + if docstring_guide == guide["name"]: + cls["guides"].append(guide) + + +add_guides() + + +def style_types(): + for mode in docs: + for cls in docs[mode]: + for tag in [ + "preprocessing", + "postprocessing", + "examples-format", + "events", + ]: + if tag not in cls["tags"]: + continue + cls[tag] = ( + cls["tags"][tag] + .replace( + "{", + "", + ) + .replace("}", "") + ) + + +style_types() + + +def 
override_signature(name, signature): + for mode in docs: + for cls in docs[mode]: + if cls["name"] == name: + cls["override_signature"] = signature + + +override_signature("Blocks", "with gradio.Blocks():") +override_signature("Row", "with gradio.Row():") +override_signature("Column", "with gradio.Column():") +override_signature("Tab", "with gradio.Tab():") +override_signature("Group", "with gradio.Group():") +override_signature("Dataset", "gr.Dataset(components, samples)") + + +def find_cls(target_cls): + for mode in docs: + for cls in docs[mode]: + if cls["name"] == target_cls: + return cls + raise ValueError("Class not found") + + +def organize_docs(d): + organized = { + "building": {}, + "components": {}, + "helpers": {}, + "modals": {}, + "routes": {}, + "events": {}, + "py-client": {}, + "chatinterface": {} + } + pages = [] + for mode in d: + for c in d[mode]: + c["parent"] = "gradio" + c["class"] = None + if "returns" in c: + c["returns"]["annotation"] = None + for p in c.get("parameters", []): + p["annotation"] = str(p["annotation"]) + if "default" in p: + p["default"] = str(p["default"]) + for f in c["fns"]: + f["fn"] = None + f["parent"] = "gradio." 
+ c["name"] + for p in f.get("parameters", []): + p["annotation"] = str(p["annotation"]) + if "default" in p: + p["default"] = str(p["default"]) + if mode == "component": + organized["components"][c["name"].lower()] = c + pages.append(c["name"].lower()) + elif mode in ["helpers", "routes", "py-client", "chatinterface", "modals"]: + organized[mode][c["name"].lower()] = c + pages.append(c["name"].lower()) + + else: + # if mode not in organized["building"]: + # organized["building"][mode] = {} + organized["building"][c["name"].lower()] = c + pages.append(c["name"].lower()) + + c_keys = list(organized["components"].keys()) + for i, cls in enumerate(organized["components"]): + if not i: + organized["components"][cls]["prev_obj"] = "Components" + organized["components"][cls]["next_obj"] = organized["components"][ + c_keys[1] + ]["name"] + elif i == len(c_keys) - 1: + organized["components"][cls]["prev_obj"] = organized["components"][ + c_keys[len(c_keys) - 2] + ]["name"] + organized["components"][cls]["next_obj"] = "load" + else: + organized["components"][cls]["prev_obj"] = organized["components"][ + c_keys[i - 1] + ]["name"] + organized["components"][cls]["next_obj"] = organized["components"][ + c_keys[i + 1] + ]["name"] + c_keys = list(organized["helpers"].keys()) + for i, cls in enumerate(organized["helpers"]): + if not i: + organized["helpers"][cls]["prev_obj"] = "Video" + organized["helpers"][cls]["next_obj"] = organized["helpers"][c_keys[1]][ + "name" + ] + elif i == len(c_keys) - 1: + organized["helpers"][cls]["prev_obj"] = organized["helpers"][ + c_keys[len(c_keys) - 2] + ]["name"] + organized["helpers"][cls]["next_obj"] = "Error" + else: + organized["helpers"][cls]["prev_obj"] = organized["helpers"][c_keys[i - 1]][ + "name" + ] + organized["helpers"][cls]["next_obj"] = organized["helpers"][c_keys[i + 1]][ + "name" + ] + c_keys = list(organized["modals"].keys()) + for i, cls in enumerate(organized["modals"]): + if not i: + organized["modals"][cls]["prev_obj"] = 
"EventData" + organized["modals"][cls]["next_obj"] = organized["modals"][c_keys[1]][ + "name" + ] + elif i == len(c_keys) - 1: + organized["modals"][cls]["prev_obj"] = organized["modals"][ + c_keys[len(c_keys) - 2] + ]["name"] + organized["modals"][cls]["next_obj"] = "Request" + else: + organized["modals"][cls]["prev_obj"] = organized["modals"][c_keys[i - 1]][ + "name" + ] + organized["modals"][cls]["next_obj"] = organized["modals"][c_keys[i + 1]][ + "name" + ] + + c_keys = list(organized["routes"].keys()) + for i, cls in enumerate(organized["routes"]): + if not i: + organized["routes"][cls]["prev_obj"] = "Info" + organized["routes"][cls]["next_obj"] = organized["routes"][c_keys[1]][ + "name" + ] + elif i == len(c_keys) - 1: + organized["routes"][cls]["prev_obj"] = organized["routes"][ + c_keys[len(c_keys) - 2] + ]["name"] + organized["routes"][cls]["next_obj"] = "Flagging" + else: + organized["routes"][cls]["prev_obj"] = organized["routes"][c_keys[i - 1]][ + "name" + ] + organized["routes"][cls]["next_obj"] = organized["routes"][c_keys[i + 1]][ + "name" + ] + c_keys = list(organized["py-client"].keys()) + for i, cls in enumerate(organized["py-client"]): + if not i: + organized["py-client"][cls]["prev_obj"] = "Python-Client" + organized["py-client"][cls]["next_obj"] = organized["py-client"][c_keys[1]][ + "name" + ] + elif i == len(c_keys) - 1: + organized["py-client"][cls]["prev_obj"] = organized["py-client"][ + c_keys[len(c_keys) - 2] + ]["name"] + organized["py-client"][cls]["next_obj"] = "JS-Client" + else: + organized["py-client"][cls]["prev_obj"] = organized["py-client"][ + c_keys[i - 1] + ]["name"] + organized["py-client"][cls]["next_obj"] = organized["py-client"][ + c_keys[i + 1] + ]["name"] + + for cls in organized["chatinterface"]: + organized["chatinterface"][cls]["prev_obj"] = "Block-Layouts" + organized["chatinterface"][cls]["next_obj"] = "Themes" + + layout_keys = ["row", "column", "tab", "group", "accordion"] + for i, cls in enumerate(layout_keys): + 
if not i: + organized["building"][cls]["prev_obj"] = "Blocks" + organized["building"][cls]["next_obj"] = layout_keys[i+1].capitalize() + elif i == len(layout_keys) - 1: + organized["building"][cls]["prev_obj"] = layout_keys[i-1].capitalize() + organized["building"][cls]["next_obj"] = "Components" + else: + organized["building"][cls]["prev_obj"] = layout_keys[i-1].capitalize() + organized["building"][cls]["next_obj"] = layout_keys[i+1].capitalize() + + + organized["building"][cls]["prev_obj"] + + + + organized["events_matrix"] = component_events + organized["events"] = events + + with open(JS_CLIENT_README) as f: + readme_content = f.read() + return {"docs": organized, "pages": pages, "js_client": readme_content} + + +docs = organize_docs(docs) + + +def generate(json_path): + with open(json_path, "w+") as f: + json.dump(docs, f) \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/generate_jsons/src/guides/__init__.py b/testbed/gradio-app__gradio/js/_website/generate_jsons/src/guides/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..919dce9aa54d7676147e55f7f7b91b67961d5020 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/generate_jsons/src/guides/__init__.py @@ -0,0 +1,154 @@ +import json +import os +import re + +DIR = os.path.dirname(__file__) +GUIDES_DIR = os.path.abspath(os.path.join(DIR, "../../../../../guides")) +GUIDE_ASSETS_DIR = os.path.join(GUIDES_DIR, "assets") +DEMOS_DIR = os.path.abspath(os.path.join(DIR, "../../../../../demo")) +CN_GUIDES_DIR = os.path.abspath(os.path.join(DIR, "../../../../../guides/cn")) + +UNDERSCORE_TOKEN = "!UNDERSCORE!" 
+ +demos = {} +for demo_folder in os.listdir(DEMOS_DIR): + runfile = os.path.join(DEMOS_DIR, demo_folder, "run.py") + if not os.path.exists(runfile): + continue + with open(runfile) as run_py: + demos[demo_folder] = run_py.read().replace( + 'if __name__ == "__main__":\n demo.launch()', "demo.launch()" + ) + + +def format_name(guide_name): + index = None + if re.match("^[0-9]+_", guide_name): + index = int(guide_name[: guide_name.index("_")]) + guide_name = guide_name[guide_name.index("_") + 1 :] + if guide_name.lower().endswith(".md"): + guide_name = guide_name[:-3] + pretty_guide_name = " ".join([word[0].upper() + word[1:] for word in guide_name.split("-")]) + return index, guide_name, pretty_guide_name + + +guide_folders = sorted(os.listdir(GUIDES_DIR)) +guide_folders.remove("CONTRIBUTING.md") +guide_folders.remove("assets") +guide_folders.remove("cn") + +guides = [] +guides_by_category = [] +guide_names = [] +guide_urls = [] +absolute_index = 0 +for guide_folder in guide_folders: + guide_list = sorted(os.listdir(os.path.join(GUIDES_DIR, guide_folder))) + _, guide_category, pretty_guide_category = format_name(guide_folder) + guides_by_category.append({"category": pretty_guide_category, "guides": []}) + guide_names.append({"category": pretty_guide_category, "guides": []}) + for guide_file in guide_list: + guide_index, guide_name, pretty_guide_name = format_name(guide_file) + with open(os.path.join(GUIDES_DIR, guide_folder, guide_file)) as f: + guide_content = f.read() + + title = guide_content.split("\n")[0] + + metadata_labels = [] + + def get_labeled_metadata(label, is_list=True): + global guide_content + metadata_labels.append(label) + full_label = label + " " + metadata = [] if is_list else None + if full_label in guide_content: + metadata = guide_content.split(full_label)[1].split("\n")[0] + guide_content = guide_content.replace(full_label + metadata, "") + if is_list: + metadata = metadata.split(", ") + return metadata + + tags = 
get_labeled_metadata("Tags:") + spaces = get_labeled_metadata("Related spaces:") + contributor = get_labeled_metadata("Contributed by", is_list=False) + + url = f"/guides/{guide_name}/" + + guide_content = re.sub( + r"\$code_([a-z _\-0-9]+)", + lambda x: f"```python\n{demos[x.group(1)]}\n```", + guide_content, + ) + guide_content = re.sub( + r"\$demo_([a-z _\-0-9]+)", + lambda x: f"", + guide_content, + ) + + content_no_html = guide_content + + guide_content = "\n".join( + [ + line + for i, line in enumerate(guide_content.split("\n")) + if not any(line.startswith(label) for label in metadata_labels) + ] + ) + guide_content = re.sub( + r"```([a-z]+)\n", + lambda x: f"
",
+            guide_content,
+        )
+        guide_content = re.sub(r"```", "
", guide_content) + guide_content = re.sub( + r"\$code_([a-z _\-0-9]+)", + lambda x: f"
{demos[x.group(1)]}
", + guide_content, + ) + guide_content = re.sub( + r"\$demo_([a-z _\-0-9]+)", + lambda x: f"", + guide_content, + ) + + + + guide_data = { + "name": guide_name, + "category": guide_category, + "pretty_category": pretty_guide_category, + "guide_index": guide_index, + "absolute_index": absolute_index, + "pretty_name": pretty_guide_name, + "content": content_no_html, + "tags": tags, + "spaces": spaces, + "url": url, + "contributor": contributor, + } + guides.append(guide_data) + guides_by_category[-1]["guides"].append(guide_data) + guide_names[-1]["guides"].append({"name": guide_name, "pretty_name": pretty_guide_name, "url": url}) + guide_urls.append(guide_name) + absolute_index += 1 + + +def generate(json_path): + if not os.path.isdir(json_path): + os.mkdir(json_path) + with open(json_path + "guides_by_category.json", 'w+') as f: + json.dump({ + "guides_by_category": guides_by_category, + }, f) + for guide in guides: + with open(json_path + guide["name"] + ".json", 'w+') as f: + json.dump({ + "guide": guide + }, f) + with open(json_path + "guide_names.json", 'w+') as f: + json.dump({ + "guide_names": guide_names, + "guide_urls": guide_urls + }, f) + + diff --git a/testbed/gradio-app__gradio/js/_website/package.json b/testbed/gradio-app__gradio/js/_website/package.json new file mode 100644 index 0000000000000000000000000000000000000000..ca3494d7e47bf793bf78571236a5a3f523a0cafe --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/package.json @@ -0,0 +1,37 @@ +{ + "name": "website", + "version": "0.7.0", + "private": true, + "scripts": { + "dev": "python generate_jsons/generate.py && vite dev", + "build": "vite build", + "preview": "vite preview", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", + "prepare": "svelte-kit sync" + }, + "devDependencies": { + "@sveltejs/adapter-auto": "^2.0.0", + "@sveltejs/adapter-static": "^2.0.2", + "@sveltejs/kit": 
"^1.5.0", + "@tailwindcss/forms": "^0.5.0", + "@tailwindcss/typography": "^0.5.4", + "@types/node": "^20.3.2", + "@types/prismjs": "^1.26.0", + "prismjs": "1.29.0", + "svelte": "^3.59.2", + "svelte-check": "^3.0.1", + "tailwindcss": "^3.1.6", + "tslib": "^2.4.1", + "typescript": "^5.0.0", + "vite": "^4.3.9" + }, + "type": "module", + "dependencies": { + "@sindresorhus/slugify": "^2.2.0", + "@sveltejs/adapter-vercel": "^3.0.3", + "hast-util-to-string": "^3.0.0", + "mdsvex": "^0.11.0", + "postcss": ">=8.3.3 <9.0.0" + } +} diff --git a/testbed/gradio-app__gradio/js/_website/postcss.config.cjs b/testbed/gradio-app__gradio/js/_website/postcss.config.cjs new file mode 100644 index 0000000000000000000000000000000000000000..0794fa7ee4821de784c9cb22540a48eb9fabe1e7 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/postcss.config.cjs @@ -0,0 +1,4 @@ +module.exports = { + extract: "themes.css", + plugins: [require("tailwindcss/nesting"), require("tailwindcss")] +}; diff --git a/testbed/gradio-app__gradio/js/_website/src/app.d.ts b/testbed/gradio-app__gradio/js/_website/src/app.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..ca07268473d752276bdbef34e764fb0ae9d8e387 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/app.d.ts @@ -0,0 +1,14 @@ +// See https://kit.svelte.dev/docs/types#app +// for information about these interfaces +declare global { + namespace App { + // interface Error {} + // interface Locals {} + // interface PageData {} + // interface Platform {} + } +} + +declare module "*.json"; + +export {}; diff --git a/testbed/gradio-app__gradio/js/_website/src/app.html b/testbed/gradio-app__gradio/js/_website/src/app.html new file mode 100644 index 0000000000000000000000000000000000000000..06dda00f89ee079210d92646ec7650f26e711a74 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/app.html @@ -0,0 +1,13 @@ + + + + + + + %sveltekit.head% + + + +
%sveltekit.body%
+ + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/brand-assets/gradio-logo-with-title.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/brand-assets/gradio-logo-with-title.svg new file mode 100644 index 0000000000000000000000000000000000000000..47891bae68037c8fc259255c959ef2f9bf51ddb2 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/brand-assets/gradio-logo-with-title.svg @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/brand-assets/gradio-logo.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/brand-assets/gradio-logo.svg new file mode 100644 index 0000000000000000000000000000000000000000..eea90d4871e8aa99be911ff7f944dbad350e18f8 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/brand-assets/gradio-logo.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/copy.js b/testbed/gradio-app__gradio/js/_website/src/lib/assets/copy.js new file mode 100644 index 0000000000000000000000000000000000000000..43fa0b30e723af717b3d0a819e6478ff74c7a84a --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/copy.js @@ -0,0 +1,4 @@ +export const svgCopy = + ''; +export const svgCheck = + ''; diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/demo_code.js b/testbed/gradio-app__gradio/js/_website/src/lib/assets/demo_code.js new file mode 100644 index 0000000000000000000000000000000000000000..39d4318774022affac8f0434f8ea048c474464a9 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/demo_code.js @@ -0,0 +1,20 @@ +export const sketch = `
import gradio as gr
+def sketch_recognition(img):
+    pass # Implement your sketch recognition model here...
+
+gr.Interface(fn=sketch_recognition, inputs="sketchpad", outputs="label").launch()
+
`; + +export const chat = `
import gradio as gr
+def chat(message, history):
+    pass  # Implement your chatbot here...
+
+gr.ChatInterface(fn=chat).launch()
+
`; + +export const stable_diffusion = `
import gradio as gr
+def generate(prompt):
+    pass  # Implement your image generation model here...
+
+gr.Interface(fn=generate, inputs="prompt", outputs="image").launch()
+
`; diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/gradio.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/gradio.svg new file mode 100644 index 0000000000000000000000000000000000000000..95d73ab78792dbe72f210618b65f18d648381863 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/gradio.svg @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/anchor.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/anchor.svg new file mode 100644 index 0000000000000000000000000000000000000000..94393f8f8f6ef4b4c258723b80a76dce0fa02dd3 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/anchor.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/dataflow.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/dataflow.svg new file mode 100644 index 0000000000000000000000000000000000000000..43e7e86f6822b2a0e5559105c275edcb479d4348 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/dataflow.svg @@ -0,0 +1,188 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/esc.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/esc.svg new file mode 100644 index 0000000000000000000000000000000000000000..d0383260c09579b3253d77ec5dd1599bc335d330 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/esc.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/github-black.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/github-black.svg new file mode 100644 index 0000000000000000000000000000000000000000..977cf9d7e59d99946025906c104f12b529907345 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/github-black.svg @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/github.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/github.svg new file mode 100644 index 0000000000000000000000000000000000000000..787f059c280e9493afa9bab3a89f03d9090a4ea6 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/github.svg @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/spaces-logo.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/spaces-logo.svg new file mode 100644 index 0000000000000000000000000000000000000000..6ee3003c311e4578ea64dbc382fcefe08acc940f --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/spaces-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/twitter.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/twitter.svg new file mode 100644 index 0000000000000000000000000000000000000000..c9fa778f07ec43261e2fc352796f1cd9d0f42c1e --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/img/twitter.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/index.ts b/testbed/gradio-app__gradio/js/_website/src/lib/assets/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..3ccdd6c4150649c52fb69961274bda12148eea98 --- /dev/null +++ 
b/testbed/gradio-app__gradio/js/_website/src/lib/assets/index.ts @@ -0,0 +1,58 @@ +export { default as gradio_logo } from "./gradio.svg"; +export { default as twitter } from "./img/twitter.svg"; +export { default as github } from "./img/github.svg"; +export { default as github_black } from "./img/github-black.svg"; + +import google from "./logos/google.svg"; +import amazon from "./logos/amazon.svg"; +import fb from "./logos/fb.svg"; +import cisco from "./logos/cisco.svg"; +import twitter from "./logos/twitter.svg"; +import vm from "./logos/vmware.svg"; +import hf from "./logos/huggingface.svg"; +import siemens from "./logos/siemens.svg"; +import mit from "./logos/mit-svg-50.png"; +import stanford from "./logos/stanford.svg"; +import uipath from "./logos/uipath.svg"; +import unify from "./logos/unifyid.svg"; +import humans from "./logos/humaniseai.svg"; +import factmata from "./logos/factmata.svg"; +import wns from "./logos/wns.png"; + +import _tweets from "./tweets.json"; + +export const logos = [ + { img: google, contrast: false, description: "Google Logo" }, + { img: amazon, contrast: true, description: "Amazon logo" }, + { img: fb, contrast: false, description: "Facebook logo" }, + { img: cisco, contrast: false, description: "CISCO logo" }, + { img: twitter, contrast: false, description: "Twitter logo" }, + { img: vm, contrast: false, description: "VMwarelogo" }, + { img: hf, contrast: false, description: "Hugging Face logo" }, + { img: siemens, contrast: false, description: "Siemens logo" }, + { img: mit, contrast: true, description: "MIT logo" }, + { img: stanford, contrast: false, description: "Stanford logo" }, + { img: uipath, contrast: false, description: "UI Path logo" }, + { img: unify, contrast: false, description: "UnifyID logo" }, + { img: humans, contrast: true, description: "Humanise AI logo" }, + { img: factmata, contrast: true, description: "Factmata logo" }, + { img: wns, contrast: true, description: "WNS logo" } +]; + +export const twitter_pics 
= ( + Object.entries(import.meta.glob("./twitter/**", { eager: true })) as [ + string, + { default: string } + ][] +).reduce( + (a, [k, mod]) => { + a[k.replace("./twitter/", "")] = mod.default; + return a; + }, + {} as Record +); + +export const tweets = _tweets.map((x) => ({ + ...x, + profile_pic: twitter_pics[x.profile_pic] +})); diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/amazon.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/amazon.svg new file mode 100644 index 0000000000000000000000000000000000000000..92c6c3c2dcaf5ab44c7da5a1ad54d38e19859b0e --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/amazon.svg @@ -0,0 +1,20 @@ + + + + + +image/svg+xml + + + + + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/cisco.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/cisco.svg new file mode 100644 index 0000000000000000000000000000000000000000..31c8e28d7bbf45409e858a736acaa61e65e1a763 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/cisco.svg @@ -0,0 +1 @@ + 480x220-Cisco-Partner-Cameo-Global \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/factmata.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/factmata.svg new file mode 100644 index 0000000000000000000000000000000000000000..95edeb4b44b863c37d939ef5540ab2292a68f915 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/factmata.svg @@ -0,0 +1,18 @@ + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/fb.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/fb.svg new file mode 100644 index 0000000000000000000000000000000000000000..071553851665ca5c2e7768bec51954df4590bd8d --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/fb.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/google.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/google.svg new file mode 100644 index 0000000000000000000000000000000000000000..8eb8c82786d8e87c8e547b766646184861904ec3 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/google.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/gradio.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/gradio.svg new file mode 100644 index 0000000000000000000000000000000000000000..64bab15b1fca6524c679043080b927a66e71a498 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/gradio.svg @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/huggingface.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/huggingface.svg new file mode 100644 index 0000000000000000000000000000000000000000..5843e856977009f5c1911f45e657d07799f6918e --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/huggingface.svg @@ -0,0 +1,182 @@ + + + + + icon + Created with Sketch. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Hugging Face + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/humaniseai.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/humaniseai.svg new file mode 100644 index 0000000000000000000000000000000000000000..67a262dc8aad05e7534b317e7abcd50dbdb2d6c2 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/humaniseai.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/siemens.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/siemens.svg new file mode 100644 index 0000000000000000000000000000000000000000..6b02d60f2435e18926e8bc3b8502499b0444b667 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/siemens.svg @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/stanford.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/stanford.svg new file mode 100644 index 0000000000000000000000000000000000000000..59c4677ecc011244e16ba7825c6a07f04bbd4fea --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/stanford.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/twitter.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/twitter.svg new file mode 100644 index 0000000000000000000000000000000000000000..39dedc3f0748f9d2a95bade580ab5098243e1182 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/twitter.svg @@ -0,0 +1,64 @@ + + + + + + + + + + image/svg+xml + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/uipath.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/uipath.svg new file mode 100644 index 
0000000000000000000000000000000000000000..0e0c1244bd8cc5b065c5f9e1dca6de6d8ba0472a --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/uipath.svg @@ -0,0 +1,9 @@ + + + + UiPath_Logo_full + Created with Sketch. + + + + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/unifyid.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/unifyid.svg new file mode 100644 index 0000000000000000000000000000000000000000..b7dbd4473af11d3cdf82c709aa076ef74d1c1f57 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/unifyid.svg @@ -0,0 +1,30 @@ + + + + +UnifyID + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/vmware.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/vmware.svg new file mode 100644 index 0000000000000000000000000000000000000000..3107c1d231e11c5c92b9ed5c182b911e64adc488 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/logos/vmware.svg @@ -0,0 +1,42 @@ + +VMware logoAn information technology company based in Palo Alto, California, United Statesimage/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/prism.css b/testbed/gradio-app__gradio/js/_website/src/lib/assets/prism.css new file mode 100644 index 0000000000000000000000000000000000000000..7185cdf6bfcd8c8e3c89f7c83939c26615c99d20 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/prism.css @@ -0,0 +1,151 @@ +/* PrismJS 1.20.0 +https://prismjs.com/download.html#themes=prism&languages=python */ +/** + * prism.js default theme for JavaScript, CSS and HTML + * Based on dabblet (http://dabblet.com) + * @author Lea Verou + */ + +code[class*="language-"], +pre[class*="language-"] { + word-wrap: normal; + color: black; + font-size: 1em; + line-height: 1.5; + font-family: Consolas, Monaco, "Andale 
Mono", "Ubuntu Mono", monospace; + + -webkit-hyphens: none; + -moz-hyphens: none; + -ms-hyphens: none; + hyphens: none; + text-align: left; + text-shadow: 0 1px white; + white-space: pre; + word-break: normal; + word-spacing: normal; + + -moz-tab-size: 4; + -o-tab-size: 4; + tab-size: 4; + font-size: 0.9em; +} + +pre[class*="language-"]::-moz-selection, +pre[class*="language-"] ::-moz-selection, +code[class*="language-"]::-moz-selection, +code[class*="language-"] ::-moz-selection { + background: #b3d4fc; + text-shadow: none; +} + +pre[class*="language-"]::selection, +pre[class*="language-"] ::selection, +code[class*="language-"]::selection, +code[class*="language-"] ::selection { + background: #b3d4fc; + text-shadow: none; +} + +@media print { + code[class*="language-"], + pre[class*="language-"] { + text-shadow: none; + } +} + +/* Code blocks */ +pre[class*="language-"] { + margin: 0.5em 0; + padding: 1em; + overflow: auto; +} + +:not(pre) > code[class*="language-"], +pre[class*="language-"] { + background: rgb(249, 250, 251); +} + +.prose code[class*="language-"], +.prose pre[class*="language-"] { + font-size: 0.9em; +} + +/* Inline code */ +:not(pre) > code[class*="language-"] { + border-radius: 0.3em; + padding: 0.1em; + white-space: normal; +} + +.token.comment, +.token.prolog, +.token.doctype, +.token.cdata { + color: slategray; +} + +.token.punctuation { + color: #999; +} + +.token.namespace { + opacity: 0.7; +} + +.token.property, +.token.tag, +.token.boolean, +.token.number, +.token.constant, +.token.symbol, +.token.deleted { + color: #905; +} + +.token.selector, +.token.attr-name, +.token.string, +.token.char, +.token.builtin, +.token.inserted { + color: #690; +} + +.token.operator, +.token.entity, +.token.url, +.language-css .token.string, +.style .token.string { + /* This background color was intended by the author of this theme. 
*/ + background: hsla(0, 0%, 100%, 0.5); + color: #9a6e3a; +} + +.token.atrule, +.token.attr-value, +.token.keyword { + color: #07a; +} + +.token.function, +.token.class-name { + color: #dd4a68; +} + +.token.regex, +.token.important, +.token.variable { + color: #e90; +} + +.token.important, +.token.bold { + font-weight: bold; +} +.token.italic { + font-style: italic; +} + +.token.entity { + cursor: help; +} diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/style.css b/testbed/gradio-app__gradio/js/_website/src/lib/assets/style.css new file mode 100644 index 0000000000000000000000000000000000000000..15bdcba550d53a477c98e94e49bdd163aedc7683 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/style.css @@ -0,0 +1,183 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +.link { + @apply font-semibold hover:text-orange-500 transition-colors; +} +.thin-link { + @apply hover:text-orange-500 transition-colors; +} +.thinner-link { + @apply hover:text-orange-500 transition-colors; +} + +.prose :where(img):not(:where([class~="not-prose"] *)) { + margin-top: 0; + margin-bottom: 0; +} + +.group:hover .group-hover\:flex { + /* for some reason, group-hover:flex not working on mobile */ + display: flex; +} +.group:active .group-active\:flex { + /* for some reason, group-active:flex not working on mobile */ + display: flex; +} +@media (min-width: 640px) { + /* for some reason, sm:block not working */ + .sm\:block { + @apply block; + } +} +@media (min-width: 768px) { + /* for some reason, md:block not working */ + .md\:block { + @apply block; + } +} + +@layer base { + a.text-link { + @apply font-semibold text-gray-800 underline decoration-orange-500 underline-offset-2 hover:text-orange-500; + } +} + +@layer utilities { + @variants hover, focus { + .filter-none { + filter: none; + } + .filter-grayscale { + filter: grayscale(100%); + } + .shadow-alternate-sm { + box-shadow: + 0px 5px 5px rgba(0, 0, 0, 0.03), + 0px 2px 2px rgba(0, 
0, 0, 0.03), + 0px 0px 1px rgba(0, 0, 0, 0.03); + } + .shadow-alternate { + box-shadow: + 0px 10px 20px rgba(0, 0, 0, 0.04), + 0px 2px 6px rgba(0, 0, 0, 0.04), + 0px 0px 1px rgba(0, 0, 0, 0.04); + } + .shadow-alternate-xl { + box-shadow: + 0px 24px 32px rgba(0, 0, 0, 0.04), + 0px 16px 24px rgba(0, 0, 0, 0.04), + 0px 4px 8px rgba(0, 0, 0, 0.04), + 0px 0px 1px rgba(0, 0, 0, 0.04); + } + } +} + +/* index */ +.active-example-tab { + @apply border-orange-500 rounded-full text-orange-500 ring-1 bg-orange-50 ring-orange-200 hover:!text-orange-500 cursor-pointer shadow shadow-orange-200; +} + +/* guides */ +.prose > p > img { + @apply max-w-full mx-auto my-0 w-4/5; +} +.prose > p > video { + @apply max-w-full mx-auto my-0 w-4/5; +} + +.prose code::before { + display: none; +} + +.prose code::after { + display: none; +} + +/* docs & guides */ +.thin-link.current-nav-link { + @apply text-orange-500; +} +.thin-link.current-nav-link:not(.subheading) { + @apply border-orange-500 md:border-l-2 pl-4; +} +.link.current-nav-link { + @apply border-orange-500 text-orange-500; +} +.thinner-link.current-nav-link { + @apply border-orange-500 text-orange-500; +} +.second-nav-link { + @apply border-l-2 border-gray-100 px-3; +} +.current-nav-link { + @apply border-orange-500 text-orange-500; +} + +/* docs */ +.selected-demo { + @apply font-semibold bg-gray-50 rounded text-orange-500; +} +code.language-python { + @apply !leading-7 !whitespace-pre-wrap !break-all; +} +code.language-bash { + @apply !leading-7 !whitespace-pre-wrap !break-all; +} + +.group-hover-visible { + @apply group-hover:visible; +} +.anchor-img { + @apply w-7 max-w-full inline-block m-0 ml-2; +} +.anchor-img-small { + @apply w-5 max-w-full inline-block m-0 ml-0.5; +} +.selected-version { + @apply font-semibold text-orange-500; +} +.selected-version:before { + content: "• "; +} + +/* copy button */ +.clipboard-button { + @apply absolute right-0 px-1.5 pt-0.5 pb-1 m-4 text-gray-500 text-sm z-[100] opacity-0 duration-100; +} 
+.clipboard-button:hover { + @apply cursor-pointer; +} +.clipboard-button:hover > svg { + @apply fill-gray-700; +} +.clipboard-button:focus { + @apply outline-0; +} +.codeblock { + @apply relative; +} +.codeblock:hover > .clipboard-button { + @apply opacity-100 duration-200; +} + +[type="search"]::-webkit-search-cancel-button { + @apply appearance-none h-5 w-5; + -webkit-appearance: none; + background-image: url("/src/lib/assets/img/esc.svg"); + background-size: 20px 20px; +} + +.view-code { + @apply w-16 p-2 mx-auto hover:bg-gray-100; + background: rgb(249, 250, 251); +} + +/* demos */ +.selected-demo-tab { + @apply font-semibold text-orange-500 rounded-t-md border-2 border-gray-100 border-b-0 bg-white; +} +.selected-demo-window { + @apply rounded-b-md border-2 border-gray-100 -mt-0.5; +} diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/tweets.json b/testbed/gradio-app__gradio/js/_website/src/lib/assets/tweets.json new file mode 100644 index 0000000000000000000000000000000000000000..9b984bca0bedfb4058cbbc8f5fb81eb2b7da3bf7 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/tweets.json @@ -0,0 +1,83 @@ +[ + { + "name": "Amar Saini", + "handle": "_Epoching_", + "link": "https://twitter.com/_Epoching_/status/1471091318482825219", + "content": "Just built a ️@Gradio app for a video related deep learning project.
I’m astonished by how simple it is to use & how elegant it looks! Lots and lots of great features & flexibility. Thanks for making this ❤ ", + "likes": 47, + "timestamp": "7:14 AM · Dec 15, 2021", + "profile_pic": "pwMrDOBv_400x400.jpeg" + }, + { + "name": "Will Rice", + "handle": "_Will_Rice", + "link": "https://twitter.com/_Will_Rice/status/1430258610131582979", + "content": "Just tried out @Gradio and I am very impressed. Only took like 10mins to put together a #tts demo.", + "likes": 11, + "timestamp": "4:00 PM · Aug 24, 2021", + "profile_pic": "LsCnjnsl_400x400.jpeg" + }, + { + "name": "Roxana Daneshjou MD/PhD", + "handle": "RoxanaDaneshjou", + "link": "https://twitter.com/RoxanaDaneshjou/status/1418399829944721415", + "content": "Honestly, without @Gradio, we would not be doing a real time AI trial. We have many other ideas for algorithms we want to test through clinical trials, and we know it's possible thanks to @Gradio.", + "likes": 15, + "timestamp": "7:37 PM · Jul 22, 2021", + "profile_pic": "ITFspAMm_x96.jpg" + }, + { + "name": "Vinay Prabhu", + "handle": "vinayprabhu", + "link": "https://twitter.com/vinayprabhu/status/1324409497641652225", + "content": "Dear #MachineLearning twitter,
If you haven't typed:
$ pip install gradio
yet, now would be a damn good time.
Especially if you are working in computer vision & deploying models in the real world. ", + "likes": 19, + "timestamp": "12:53 PM · Nov 5, 2020", + "profile_pic": "1013607349943058433.jpg" + }, + { + "name": "Tanishq Mathew Abraham", + "handle": "iScienceLuvr", + "link": "https://twitter.com/iScienceLuvr/status/1460716613032837120", + "content": "After training an ML model, the BEST way to showcase it to the world is to make a demo for others to try!
The easiest way to do so is w/ @Gradio, hosted on @HuggingFace Spaces.
Read my new blog post to learn how to do this (w/ appearance by @fastdotai)!
https://tmabraham.github...", + "likes": 285, + "timestamp": "4:09 PM · Nov 16, 2021", + "profile_pic": "ksO1TT2P_400x400.jpeg" + }, + { + "name": "Dipankar Mazumdar", + "handle": "Dipankartnt", + "link": "https://twitter.com/Dipankartnt/status/1427750254586253318", + "content": "I love low-code ML solutions like @Gradio that do not restricts anyone from making ML accessible. #machinelearning #datascience", + "likes": 0, + "timestamp": "5:52 PM · Aug 17, 2021", + "profile_pic": "GDLc7Oe4_400x400.jpeg" + }, + { + "name": "Charly Wargnier", + "handle": "DataChaz", + "link": "https://twitter.com/DataChaz/status/1351290055894179842", + "content": "Pretty neat that @GradioML!\uD83D\uDC0D\uD83D\uDD25
+ Generate an easy-to-use UI for your #ML model, function, or #API with only a few lines of code!
+ Integrate directly into your @ProjectJupyter notebook
+ or share a link with anyone

h/t
@VincentTerrasi #MachineLearning www.gradio.app", + "likes": 18, + "timestamp": "5:07 PM · Jan 18, 2021", + "profile_pic": "1362781887098454025.jpg" + }, + { + "name": "Chua Chin Hon", + "handle": "chinhon", + "link": "https://twitter.com/chinhon/status/1452510073452859392", + "content": "What's exciting about ML in 2021 is how the building blocks r coming together. Built this headline writer using:
- AutoNLP to finetune Bart
- @Gradio for the UI
- @huggingface's Spaces for hosting and compute
Try it here: https://huggingface.co/spa...
", + "likes": 61, + "timestamp": "10:39 PM · Oct 24, 2021", + "profile_pic": "R1gj6nb3_x96.jpg" + }, + { + "name": "Poonam Ligade @Jarvislabs.ai", + "handle": "Poonamligade", + "link": "https://twitter.com/Poonamligade/status/1521740054368251905", + "content": "My son is fascinated with all things about dinosaurs. I built a \uD83E\uDD96 \uD83E\uDD95 classifier for him as homework for the first week of the fastai cohort.
I used @Gradio, and deployed on @jarvislabsai. http://dinoapp.jarvis..", + "likes": 305, + "timestamp": "2:34 AM · May 4, 2022", + "profile_pic": "8vyTl51q_400x400.jpeg" + } +] diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/twitter/heart.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/twitter/heart.svg new file mode 100644 index 0000000000000000000000000000000000000000..a6cc7d8478cefe8db64fcfe93f980c94427a057c --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/twitter/heart.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/twitter/logo.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/twitter/logo.svg new file mode 100644 index 0000000000000000000000000000000000000000..49f4544127067e3db8f61e1ba465199abc3308a1 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/twitter/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/assets/twitter/redheart.svg b/testbed/gradio-app__gradio/js/_website/src/lib/assets/twitter/redheart.svg new file mode 100644 index 0000000000000000000000000000000000000000..b1562fe19033a354da1c586767fc133fef58a1a1 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/assets/twitter/redheart.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/Demos.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/Demos.svelte new file mode 100644 index 0000000000000000000000000000000000000000..0ed2b95eba8bf52c2309ada9f45cfbf25b1a92af --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/Demos.svelte @@ -0,0 +1,46 @@ + + +
+ + + + +
{@html highlighted_code}
+
+
+ +{#key name} + {#if on_main} + + {:else} + + {/if} +{/key} diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/DemosLanding.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/DemosLanding.svelte new file mode 100644 index 0000000000000000000000000000000000000000..f8f7e2e4aba0d429d77dc376b51190da9b7bb06d --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/DemosLanding.svelte @@ -0,0 +1,64 @@ + + +
+ +
+
+ {#each tabs as { demo, code }, i (demo)} +
+
+ {@html code} +
+
+ {#key demo} + + {/key} +
+
+ {/each} +
diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/DocsNav.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/DocsNav.svelte new file mode 100644 index 0000000000000000000000000000000000000000..a46dd181c97c48841eb8f5e26fbbbe7f27f305b3 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/DocsNav.svelte @@ -0,0 +1,230 @@ + + + + + + +
(show_nav = false)} + class:hidden={!show_nav} + class="min-w-[200px] navigation mobile-nav overflow-y-auto fixed backdrop-blur-lg z-50 bg-gray-200/50 pr-6 pl-4 py-4 -ml-4 h-full inset-0 w-5/6 lg:inset-auto lg:ml-0 lg:z-0 lg:backdrop-blur-none lg:navigation lg:p-0 lg:pb-4 lg:h-screen lg:leading-relaxed lg:sticky lg:top-0 lg:text-md lg:block rounded-t-xl lg:bg-gradient-to-r lg:from-white lg:to-gray-50 lg:overflow-x-clip lg:w-2/12" + id="mobile-nav" +> + + + + +

Building Demos

+ Interface + ChatInterfaceNEW + TabbedInterface + + Blocks + +

Block Layouts

+ + Row + Column + Tab + Group + Accordion + + Components + {#each Object.entries(components) as [name, obj] (name)} + {obj.name} + {/each} +

Helpers

+ {#each Object.entries(helpers) as [name, obj] (name)} + {obj.name} + {/each} +

Modals

+ {#each Object.entries(modals) as [name, obj] (name)} + {obj.name} + {/each} + +

Routes

+ {#each Object.entries(routes) as [name, obj] (name)} + {obj.name} + {/each} + +

Other

+ + Flagging + Themes + + Python Client + {#each Object.entries(py_client) as [name, obj] (name)} + {obj.name} + {/each} + JavaScript Client +
diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/EventListeners.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/EventListeners.svelte new file mode 100644 index 0000000000000000000000000000000000000000..e347d46d7267d71d2ca2a3265fb5b1f603dbfd26 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/EventListeners.svelte @@ -0,0 +1,105 @@ + + +
+

Description

+

+ Event listeners allow you to capture and respond to user interactions with + the UI components you've defined in a Gradio Blocks app. When a user + interacts with an element, such as changing a slider value or uploading an + image, a function is called. +

+
+ +
+

+ Supported Event Listeners +

+

+ The {fns[0].parent.replace("gradio.", "")} + component supports the following event listeners. Each event listener takes the + same parameters, which are listed in the + Event Arguments table below. +

+ + + + + + + + + + {#each fns as fn} + + + + + {/each} + +
ListenerDescription
+

+ {fn.parent}.{fn.name}(fn, ···) +

+
+

{fn.description}

+
+
+ +
+

+ Event Arguments +

+ + + + + + + + + + {#each fns[0].parameters as param} + {#if param["name"] != "self"} + + + + + {/if} + {/each} + +
ParameterDescription
+ + {param["name"]} + +

{param["annotation"]}

+ {#if "default" in param} +

+ default: {param["default"]} +

+ {:else if !("kwargs" in param)} +

required

+ {/if} +
+

{param["doc"] || ""}

+
+
+ + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/Footer.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/Footer.svelte new file mode 100644 index 0000000000000000000000000000000000000000..3768b1a93962c629102270351681bd18c6295f44 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/Footer.svelte @@ -0,0 +1,95 @@ + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/FunctionDoc.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/FunctionDoc.svelte new file mode 100644 index 0000000000000000000000000000000000000000..6b3e3f877593f547172abaad01fc0b831ceeb672 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/FunctionDoc.svelte @@ -0,0 +1,137 @@ + + + + +
+
+

+ {fn.name} + +

+
+ + {#if fn.override_signature} +
+
{fn.override_signature}
+
+ {:else} +
+
{fn.parent}.{fn.name}({#each fn.parameters as param}{#if !("kwargs" in param) && !("default" in param) && param.name != "self"}{param.name}, {/if}{/each}···)
+
+ {/if} + +

+ Description + +

+

{@html fn.description}

+ + {#if fn.example} +

+ Example Usage + +

+
+
{@html fn.highlighted_example}
+
+ {/if} + + {#if (fn.parameters.length > 0 && fn.parameters[0].name != "self") || fn.parameters.length > 1} +

+ Agruments + +

+ + + + + + + + + + {#each fn.parameters as param} + {#if param["name"] != "self"} + + + + + {/if} + {/each} + +
ParameterDescription
+ + {param["name"]} + +

{param["annotation"]}

+ {#if "default" in param} +

+ default: {param["default"]} +

+ {:else if !("kwargs" in param)} +

required

+ {/if} +
+

{param["doc"] || ""}

+
+ {/if} +
diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/Header.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/Header.svelte new file mode 100644 index 0000000000000000000000000000000000000000..2b11c86ddff0a109b4ee92f83f00d35ee9183ed1 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/Header.svelte @@ -0,0 +1,100 @@ + + +
+ + Gradio logo + + (click_nav = !click_nav)} + > + + + + + + +
diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/MetaTags.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/MetaTags.svelte new file mode 100644 index 0000000000000000000000000000000000000000..4317a36059264f739d6ff44ec12ee61a8cb7e5b0 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/MetaTags.svelte @@ -0,0 +1,33 @@ + + + + {title} + + + + + + + + + + + + + + + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/VersionDropdown.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/VersionDropdown.svelte new file mode 100644 index 0000000000000000000000000000000000000000..99d8bde48b7f4ac00d778b9c28fea55aa562db9f --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/VersionDropdown.svelte @@ -0,0 +1,42 @@ + + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/clickOutside.ts b/testbed/gradio-app__gradio/js/_website/src/lib/components/clickOutside.ts new file mode 100644 index 0000000000000000000000000000000000000000..0a18c9aae995242f85eec4d3e06dda5ca281e760 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/clickOutside.ts @@ -0,0 +1,26 @@ +/** Dispatch event on click outside of node */ +namespace svelte.JSX { + interface HTMLProps { + onclick_outside?: (e: CustomEvent) => void; + } +} + +export function clickOutside(node: Node) { + const handleClick = (event: MouseEvent) => { + if ( + node && + !node.contains(event.target as Node) && + !event.defaultPrevented + ) { + node.dispatchEvent(new CustomEvent("click_outside", node as any)); + } + }; + + document.addEventListener("click", handleClick, true); + + return { + destroy() { + document.removeEventListener("click", handleClick, true); + } + }; +} diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/components/icons/DownloadIcon.svelte b/testbed/gradio-app__gradio/js/_website/src/lib/components/icons/DownloadIcon.svelte new file mode 
100644 index 0000000000000000000000000000000000000000..28487d8f5b3ddad8073a5ea63ea22b41daf038ef --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/components/icons/DownloadIcon.svelte @@ -0,0 +1,6 @@ + + diff --git a/testbed/gradio-app__gradio/js/_website/src/lib/utils.ts b/testbed/gradio-app__gradio/js/_website/src/lib/utils.ts new file mode 100644 index 0000000000000000000000000000000000000000..e6a1fc55f9da6df16a0687d26a6e2e50c2533171 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/lib/utils.ts @@ -0,0 +1,66 @@ +import { onDestroy } from "svelte"; +import { writable } from "svelte/store"; + +const sizes = { + sm: "(min-width: 640px)", + md: "(min-width: 768px)", + lg: "(min-width: 1024px)", + xl: "(min-width: 1280px)", + "2xl": "(min-width: 1536px)" +} as const; + +const _default = { + sm: false, + md: false, + lg: false, + xl: false, + "2xl": false +}; + +export const media_query = () => { + const { subscribe, update } = writable(_default); + + const listeners: { + [key: string]: [MediaQueryList, (ev: MediaQueryListEvent) => any]; + } = {}; + const onChange = (key: string) => () => + update((s) => ({ ...s, [key]: !!listeners[key][0].matches })); + + if (typeof window !== "undefined") { + for (const key in sizes) { + const mql = window.matchMedia(sizes[key as keyof typeof sizes]); + const listener = onChange(key); + + mql.addEventListener("change", listener); + + listeners[key] = [mql, listener]; + } + + onDestroy(() => { + for (const key in listeners) { + const [_mql, _listener] = listeners[key]; + _mql.removeEventListener("change", _listener); + } + }); + } + + return { subscribe }; +}; + +import slugify from "@sindresorhus/slugify"; + +export function make_slug_processor() { + const seen_slugs = new Map(); + + return function (name: string) { + const slug = slugify(name, { separator: "-", lowercase: true }); + let count = seen_slugs.get(slug); + if (count) { + seen_slugs.set(slug, count + 1); + return `${slug}-${count + 1}`; + } 
else { + seen_slugs.set(slug, 1); + return slug; + } + }; +} diff --git a/testbed/gradio-app__gradio/js/_website/src/routes/+error.svelte b/testbed/gradio-app__gradio/js/_website/src/routes/+error.svelte new file mode 100644 index 0000000000000000000000000000000000000000..45341617a3461c3968bb0f0a8f208c38190b2528 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/routes/+error.svelte @@ -0,0 +1,47 @@ + + +
+
+ +

+ {$page.status} +

+

+ {$page.error?.message} +

+ +
+
+ + diff --git a/testbed/gradio-app__gradio/js/_website/src/routes/+layout.server.ts b/testbed/gradio-app__gradio/js/_website/src/routes/+layout.server.ts new file mode 100644 index 0000000000000000000000000000000000000000..4c34aea653c974e3ba1ff501f6e53b7839894880 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/routes/+layout.server.ts @@ -0,0 +1,10 @@ +import { redirect } from "@sveltejs/kit"; +import { redirects } from "./redirects.js"; + +export const prerender = true; + +export async function load({ url }: any) { + if (url.pathname in redirects) { + throw redirect(308, redirects[url.pathname as keyof typeof redirects]); + } +} diff --git a/testbed/gradio-app__gradio/js/_website/src/routes/+layout.svelte b/testbed/gradio-app__gradio/js/_website/src/routes/+layout.svelte new file mode 100644 index 0000000000000000000000000000000000000000..3abd1ea6a0011f0152b52e5dddb6256584868727 --- /dev/null +++ b/testbed/gradio-app__gradio/js/_website/src/routes/+layout.svelte @@ -0,0 +1,59 @@ + + + + + + + + + + + + + +
+ + + +