first commit

hailin 2025-07-06 06:30:20 +00:00
commit 9102b22125
3039 changed files with 384551 additions and 0 deletions

Dockerfile Normal file
@@ -0,0 +1,20 @@
FROM python:3.10-slim
WORKDIR /app
# Install basic build dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential \
git \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
# Copy the Gradio source code (note the path)
COPY gradio-5.35.0 /app/gradio-5.35.0
# Install Gradio from source
RUN pip install --upgrade pip && \
pip install -e ./gradio-5.35.0
# Verify the installation: print the version number
CMD ["python", "-c", "import gradio; print('✅ Gradio version =', gradio.__version__)"]

build-and-run.sh Normal file
@@ -0,0 +1,56 @@
#!/bin/bash
set -e # ❗ exit immediately on any error
set -o pipefail
# ======== Configuration ========
IMAGE_NAME="gradio-local:5.35.0"
CONTAINER_NAME="gradio-container"
GIT_DIR="./gradio-5.35.0"
PORT=7860
PROXY_URL="http://127.0.0.1:7890"
echo "🛠️ Building and deploying the Gradio image..."
# ======== Stop and remove any existing container ========
if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
echo "🛑 Stopping and removing old container ${CONTAINER_NAME}..."
docker stop "${CONTAINER_NAME}" || true
docker rm "${CONTAINER_NAME}" || true
fi
# ======== Remove the old image (if it exists) ========
if docker images --format '{{.Repository}}:{{.Tag}}' | grep -q "^${IMAGE_NAME}$"; then
echo "🧹 Removing old image ${IMAGE_NAME}..."
docker rmi "${IMAGE_NAME}" || true
fi
# ======== Enter the source directory and pull the latest code ========
if [ ! -d "${GIT_DIR}/.git" ]; then
echo "❌ Error: Git repository directory not found: ${GIT_DIR}"
exit 1
fi
echo "📥 Pulling the latest code..."
cd "${GIT_DIR}"
git reset --hard
git pull
cd ..
# ======== Build the Docker image ========
echo "🐳 Building the Docker image..."
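# Note: the proxy build-arg below is only picked up if the Dockerfile declares a matching "ARG proxy".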
docker build \
--build-arg proxy="${PROXY_URL}" \
--network=host \
-t "${IMAGE_NAME}" \
-f Dockerfile . # the Dockerfile is expected to be in the current directory (or pass a path)
# ======== Start the container ========
echo "🚀 Starting the Gradio container (running in the background)..."
docker run -d \
--name "${CONTAINER_NAME}" \
--network=host \
"${IMAGE_NAME}"
echo "✅ Gradio container started successfully, listening on port ${PORT}"

@@ -0,0 +1,8 @@
# Changesets
Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
with multi-package repos, or single-package repos to help you version and publish your code. You can
find the full documentation for it [in our repository](https://github.com/changesets/changesets)
We have a quick list of common questions to get you started engaging with this project in
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)

@@ -0,0 +1,332 @@
const { getPackagesSync } = require("@manypkg/get-packages");
const dependents_graph = require("@changesets/get-dependents-graph");
const gh = require("@changesets/get-github-info");
const { existsSync, readFileSync, writeFileSync } = require("fs");
const { join } = require("path");
const { getInfo, getInfoFromPullRequest } = gh;
const pkg_data = getPackagesSync(process.cwd());
const { packages, rootDir } = pkg_data;
const dependents = dependents_graph.getDependentsGraph({
packages,
root: pkg_data.rootPackage
});
/**
* @typedef {{packageJson: {name: string, python?: boolean}, dir: string}} Package
*/
/**
* @typedef {{summary: string, id: string, commit: string, releases: {name: string}}} Changeset
*/
/**
*
* @param {string} package_name The name of the package to find the directories for
* @returns {string[]} The directories for the package
*/
function find_packages_dirs(package_name) {
/** @type {string[]} */
let package_dirs = [];
/** @type {Package | undefined} */
const _package = packages.find((p) => p.packageJson.name === package_name);
if (!_package) throw new Error(`Package ${package_name} not found`);
package_dirs.push(_package.dir);
if (_package.packageJson.python) {
package_dirs.push(join(_package.dir, ".."));
}
return package_dirs;
}
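// Accumulates per-package changelog data across changesets; it is written out to
// .changeset/_changelog.json so a later script can assemble the per-package CHANGELOG files.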
let lines = {
_handled: []
};
const changelogFunctions = {
/**
*
* @param {Changeset[]} changesets The changesets that have been created
* @param {any} dependenciesUpdated The dependencies that have been updated
* @param {any} options The options passed to the changelog generator
* @returns {Promise<string>} The release line for the dependencies
*/
getDependencyReleaseLine: async (
changesets,
dependenciesUpdated,
options
) => {
if (!options.repo) {
throw new Error(
'Please provide a repo to this changelog generator like this:\n"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]'
);
}
if (dependenciesUpdated.length === 0) return "";
const changesetLink = `- Updated dependencies [${(
await Promise.all(
changesets.map(async (cs) => {
if (cs.commit) {
let { links } = await getInfo({
repo: options.repo,
commit: cs.commit
});
return links.commit;
}
})
)
)
.filter((_) => _)
.join(", ")}]:`;
const updatedDepenenciesList = dependenciesUpdated.map(
/**
*
* @param {any} dependency The dependency that has been updated
* @returns {string} The formatted dependency
*/
(dependency) => {
const updates = dependents.get(dependency.name);
if (updates && updates.length > 0) {
updates.forEach((update) => {
if (!lines[update]) {
lines[update] = {
dirs: find_packages_dirs(update),
current_changelog: "",
feat: [],
fix: [],
highlight: [],
previous_version: packages.find(
(p) => p.packageJson.name === update
).packageJson.version,
dependencies: []
};
const changelog_path = join(
//@ts-ignore
lines[update].dirs[1] || lines[update].dirs[0],
"CHANGELOG.md"
);
if (existsSync(changelog_path)) {
//@ts-ignore
lines[update].current_changelog = readFileSync(
changelog_path,
"utf-8"
)
.replace(`# ${update}`, "")
.trim();
}
}
lines[update].dependencies.push(
` - ${dependency.name}@${dependency.newVersion}`
);
});
}
return ` - ${dependency.name}@${dependency.newVersion}`;
}
);
writeFileSync(
join(rootDir, ".changeset", "_changelog.json"),
JSON.stringify(lines, null, 2)
);
return [changesetLink, ...updatedDepenenciesList].join("\n");
},
/**
*
* @param {{summary: string, id: string, commit: string, releases: {name: string}[]}} changeset The changeset that has been created
* @param {any} type The type of changeset
* @param {any} options The options passed to the changelog generator
* @returns {Promise<string>} The release line for the changeset
*/
getReleaseLine: async (changeset, type, options) => {
if (!options || !options.repo) {
throw new Error(
'Please provide a repo to this changelog generator like this:\n"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]'
);
}
let prFromSummary;
let commitFromSummary;
/**
* @type {string[]}
*/
let usersFromSummary = [];
const replacedChangelog = changeset.summary
.replace(/^\s*(?:pr|pull|pull\s+request):\s*#?(\d+)/im, (_, pr) => {
let num = Number(pr);
if (!isNaN(num)) prFromSummary = num;
return "";
})
.replace(/^\s*commit:\s*([^\s]+)/im, (_, commit) => {
commitFromSummary = commit;
return "";
})
.replace(/^\s*(?:author|user):\s*@?([^\s]+)/gim, (_, user) => {
usersFromSummary.push(user);
return "";
})
.trim();
const [firstLine, ...futureLines] = replacedChangelog
.split("\n")
.map((l) => l.trimRight());
const links = await (async () => {
if (prFromSummary !== undefined) {
let { links } = await getInfoFromPullRequest({
repo: options.repo,
pull: prFromSummary
});
if (commitFromSummary) {
links = {
...links,
commit: `[\`${commitFromSummary}\`](https://github.com/${options.repo}/commit/${commitFromSummary})`
};
}
return links;
}
const commitToFetchFrom = commitFromSummary || changeset.commit;
if (commitToFetchFrom) {
let { links } = await getInfo({
repo: options.repo,
commit: commitToFetchFrom
});
return links;
}
return {
commit: null,
pull: null,
user: null
};
})();
const user_link = /\[(@[^]+)\]/.exec(links.user);
const users =
usersFromSummary && usersFromSummary.length
? usersFromSummary
.map((userFromSummary) => `@${userFromSummary}`)
.join(", ")
: user_link
? user_link[1]
: links.user;
const prefix = [
links.pull === null ? "" : `${links.pull}`,
links.commit === null ? "" : `${links.commit}`
]
.join(" ")
.trim();
const suffix = users === null ? "" : ` Thanks ${users}!`;
/**
* @typedef {{[key: string]: string[] | {dirs: string[], current_changelog: string, feat: {summary: string}[], fix: {summary: string}[], highlight: {summary: string}[]}}} ChangesetMeta
*/
/**
* @type { ChangesetMeta & { _handled: string[] } }}
*/
if (lines._handled.includes(changeset.id)) {
return "done";
}
lines._handled.push(changeset.id);
changeset.releases.forEach((release) => {
if (!lines[release.name]) {
lines[release.name] = {
dirs: find_packages_dirs(release.name),
current_changelog: "",
feat: [],
fix: [],
highlight: [],
previous_version: packages.find(
(p) => p.packageJson.name === release.name
).packageJson.version,
dependencies: []
};
}
const changelog_path = join(
//@ts-ignore
lines[release.name].dirs[1] || lines[release.name].dirs[0],
"CHANGELOG.md"
);
if (existsSync(changelog_path)) {
//@ts-ignore
lines[release.name].current_changelog = readFileSync(
changelog_path,
"utf-8"
)
.replace(`# ${release.name}`, "")
.trim();
}
const [, _type, summary] = changeset.summary
.trim()
.match(/^(feat|fix|highlight)\s*:\s*([^]*)/im) || [
,
"feat",
changeset.summary
];
let formatted_summary = "";
if (_type === "highlight") {
const [heading, ...rest] = summary.trim().split("\n");
const _heading = `${heading} ${prefix ? `(${prefix})` : ""}`;
const _rest = rest.concat(["", suffix]);
formatted_summary = `${_heading}\n${_rest.join("\n")}`;
} else {
formatted_summary = handle_line(summary, prefix, suffix);
}
//@ts-ignore
lines[release.name][_type].push({
summary: formatted_summary
});
});
writeFileSync(
join(rootDir, ".changeset", "_changelog.json"),
JSON.stringify(lines, null, 2)
);
return `\n\n-${prefix ? `${prefix} -` : ""} ${firstLine}\n${futureLines
.map((l) => ` ${l}`)
.join("\n")}`;
}
};
/**
* @param {string} str The changelog entry
* @param {string} prefix The prefix to add to the first line
* @param {string} suffix The suffix to add to the last line
* @returns {string} The formatted changelog entry
*/
function handle_line(str, prefix, suffix) {
const [_s, ...lines] = str.split("\n").filter(Boolean);
const desc = `${prefix ? `${prefix} -` : ""} ${_s.replace(
/[\s\.]$/,
""
)}. ${suffix}`;
if (_s.length === 1) {
return desc;
}
return [desc, ...lines.map((l) => ` ${l}`)].join("\n");
}
module.exports = changelogFunctions;

@@ -0,0 +1,11 @@
{
"$schema": "https://unpkg.com/@changesets/config@2.3.0/schema.json",
"changelog": ["./changeset.cjs", { "repo": "gradio-app/gradio" }],
"commit": false,
"fixed": [],
"linked": [],
"access": "public",
"baseBranch": "main",
"updateInternalDependencies": "patch",
"ignore": ["@self/spaces-test", "@self/cdn-test"]
}

@@ -0,0 +1,149 @@
const { join } = require("path");
const { readFileSync, existsSync, writeFileSync, unlinkSync } = require("fs");
const { getPackagesSync } = require("@manypkg/get-packages");
const RE_PKG_NAME = /^[\w-]+\b/;
const pkg_meta = getPackagesSync(process.cwd());
/**
* @typedef {{dirs: string[], highlight: {summary: string}[], feat: {summary: string}[], fix: {summary: string}[], current_changelog: string}} ChangesetMeta
*/
/**
* @typedef {{[key: string]: ChangesetMeta}} ChangesetMetaCollection
*/
function run() {
if (!existsSync(join(pkg_meta.rootDir, ".changeset", "_changelog.json"))) {
console.warn("No changesets to process");
return;
}
/**
* @type { ChangesetMetaCollection & { _handled: string[] } }}
*/
const { _handled, ...packages } = JSON.parse(
readFileSync(
join(pkg_meta.rootDir, ".changeset", "_changelog.json"),
"utf-8"
)
);
/**
* @typedef { {packageJson: {name: string, version: string, python: boolean}, dir: string} } PackageMeta
*/
/**
* @type { {[key:string]: PackageMeta} }
*/
const all_packages = pkg_meta.packages.reduce((acc, pkg) => {
acc[pkg.packageJson.name] = /**@type {PackageMeta} */ (
/** @type {unknown} */ (pkg)
);
return acc;
}, /** @type {{[key:string] : PackageMeta}} */ ({}));
for (const pkg_name in packages) {
const { dirs, highlight, feat, fix, current_changelog, dependencies } =
/**@type {ChangesetMeta} */ (packages[pkg_name]);
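// Special case: keep @gradio/lite's major/minor in line with the main gradio package,
// bumping only the patch when the minor has not changed.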
if (pkg_name === "@gradio/lite") {
const target = all_packages.gradio.packageJson.version.split(".");
const current_version = packages[pkg_name].previous_version.split(".");
if (!packages.gradio) {
const patch = parseInt(current_version[2]) + 1;
const new_version = [target[0], target[1], patch];
all_packages[pkg_name].packageJson.version = new_version.join(".");
} else {
if (parseInt(target[1]) > parseInt(current_version[1])) {
all_packages[pkg_name].packageJson.version = target.join(".");
} else if (parseInt(target[1]) === parseInt(current_version[1])) {
const patch = parseInt(current_version[2]) + 1;
const new_version = [target[0], target[1], patch];
all_packages[pkg_name].packageJson.version = new_version.join(".");
}
}
writeFileSync(
join(all_packages[pkg_name].dir, "package.json"),
JSON.stringify(all_packages[pkg_name].packageJson, null, "\t") + "\n"
);
}
const { version, python } = all_packages[pkg_name].packageJson;
const highlights = highlight?.map((h) => `${h.summary}`) || [];
const features = feat?.map((f) => `- ${f.summary}`) || [];
const fixes = fix?.map((f) => `- ${f.summary}`) || [];
const deps = Array.from(new Set(dependencies?.map((d) => d.trim()))) || [];
const release_notes = /** @type {[string[], string][]} */ ([
[highlights, "### Highlights"],
[features, "### Features"],
[fixes, "### Fixes"],
[deps, "### Dependency updates"]
])
.filter(([s], i) => s.length > 0)
.map(([lines, title]) => {
if (title === "### Highlights") {
return `${title}\n\n${lines.join("\n\n")}`;
}
return `${title}\n\n${lines.join("\n")}`;
})
.join("\n\n");
const new_changelog = `# ${pkg_name}
## ${version}
${release_notes}
${current_changelog.replace(`# ${pkg_name}`, "").trim()}
`.trim();
dirs.forEach((dir) => {
writeFileSync(join(dir, "CHANGELOG.md"), new_changelog);
});
if (python) {
bump_local_dependents(pkg_name, version);
}
}
unlinkSync(join(pkg_meta.rootDir, ".changeset", "_changelog.json"));
/**
* @param {string} pkg_to_bump The name of the package to bump
* @param {string} version The version to bump to
* @returns {void}
* */
function bump_local_dependents(pkg_to_bump, version) {
for (const pkg_name in all_packages) {
const {
dir,
packageJson: { python }
} = all_packages[pkg_name];
if (!python) continue;
const requirements_path = join(dir, "..", "requirements.txt");
const requirements = readFileSync(requirements_path, "utf-8").split("\n");
const pkg_index = requirements.findIndex((line) => {
const m = line.trim().match(RE_PKG_NAME);
if (!m) return false;
return m[0] === pkg_to_bump;
});
if (pkg_index !== -1) {
requirements[pkg_index] = `${pkg_to_bump}==${version}`;
writeFileSync(requirements_path, requirements.join("\n"));
}
}
}
}
run();

@@ -0,0 +1,35 @@
**/*.md
**/js/app/public/**
**/pnpm-workspace.yaml
**/js/app/dist/**
**/js/wasm/dist/**
**/js/preview/dist/**
**/client/js/dist/**
**/js/*/dist/**
**/pnpm-lock.yaml
**/js/plot/src/Plot.svelte
**/.svelte-kit/**
**/demo/**
**/gradio/**
**/.pnpm-store/**
**/.venv/**
/guides/**
**/.mypy_cache/**
!test-strategy.md
**/js/_space-test/**
../js/lite/src/theme.css
../js/storybook/theme.css
**/gradio_cached_examples/**
**/storybook-static/**
**/.vscode/**
sweep.yaml
**/.vercel/**
**/build/**
**/src/lib/json/**/*
**/playwright/.cache/**/*
**/theme/src/pollen.css
**/venv/**
../js/app/src/api_docs/CodeSnippet.svelte
../js/app/src/api_docs/RecordingSnippet.svelte
../.changeset/pre.json

@@ -0,0 +1,8 @@
{
"useTabs": true,
"singleQuote": false,
"trailingComma": "none",
"printWidth": 80,
"plugins": ["prettier-plugin-svelte"],
"overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }]
}

@@ -0,0 +1,95 @@
import { defineConfig } from "vite";
import { svelte } from "@sveltejs/vite-plugin-svelte";
import sveltePreprocess from "svelte-preprocess";
// @ts-ignore
import custom_media from "postcss-custom-media";
import global_data from "@csstools/postcss-global-data";
// @ts-ignore
import prefixer from "postcss-prefix-selector";
import { readFileSync } from "fs";
import { join } from "path";
import { fileURLToPath } from "url";
const __dirname = fileURLToPath(new URL(".", import.meta.url));
const version_path = join(__dirname, "..", "gradio", "package.json");
const theme_token_path = join(
__dirname,
"..",
"js",
"theme",
"src",
"tokens.css"
);
const version = JSON.parse(readFileSync(version_path, { encoding: "utf-8" }))
.version.trim()
.replace(/\./g, "-");
//@ts-ignore
export default defineConfig(({ mode }) => {
const production = mode === "production";
return {
server: {
port: 9876
},
resolve: {
conditions: ["gradio"]
},
build: {
sourcemap: false,
target: "esnext",
minify: production,
rollupOptions: {
external: ["virtual:component-loader"]
}
},
define: {
BUILD_MODE: production ? JSON.stringify("prod") : JSON.stringify("dev"),
BACKEND_URL: production
? JSON.stringify("")
: JSON.stringify("http://localhost:7860/"),
GRADIO_VERSION: JSON.stringify(version)
},
css: {
postcss: {
plugins: [
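// Scope generated CSS to the version-specific .gradio-container class so styles from
// different Gradio versions can coexist; :root, dark-mode, and selectors coming from
// .svelte files are left untouched.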
prefixer({
prefix: `.gradio-container-${version}`,
// @ts-ignore
transform(prefix, selector, prefixedSelector, fileName) {
if (selector.indexOf("gradio-container") > -1) {
return prefix;
} else if (
selector.indexOf(":root") > -1 ||
selector.indexOf("dark") > -1 ||
fileName.indexOf(".svelte") > -1
) {
return selector;
}
return prefixedSelector;
}
}),
custom_media()
]
}
},
plugins: [
svelte({
inspector: false,
compilerOptions: {
dev: !production
},
hot: !process.env.VITEST && !production,
preprocess: sveltePreprocess({
postcss: {
plugins: [
global_data({ files: [theme_token_path] }),
custom_media()
]
}
})
})
]
};
});

@@ -0,0 +1,63 @@
from __future__ import annotations

import shutil
import pathlib
from typing import Any

from hatchling.builders.hooks.plugin.interface import BuildHookInterface


def copy_js_code(root: str | pathlib.Path):
    NOT_COMPONENT = [
        "app",
        "node_modules",
        "storybook",
        "playwright-report",
        "workbench",
        "tooltils",
        "component-test",
        "core",
        "spa",
    ]
    for entry in (pathlib.Path(root) / "js").iterdir():
        if (
            entry.is_dir()
            and not str(entry.name).startswith("_")
            and not str(entry.name) in NOT_COMPONENT
        ):

            def ignore(s, names):
                ignored = []
                for n in names:
                    if (
                        n.startswith("CHANGELOG")
                        or n.startswith("README.md")
                        or n.startswith("node_modules")
                        or ".test." in n
                        or ".stories." in n
                        or ".spec." in n
                    ):
                        ignored.append(n)
                return ignored

            shutil.copytree(
                str(entry),
                str(pathlib.Path("gradio") / "_frontend_code" / entry.name),
                ignore=ignore,
                dirs_exist_ok=True,
            )
    shutil.copytree(
        str(pathlib.Path(root) / "client" / "js"),
        str(pathlib.Path("gradio") / "_frontend_code" / "client"),
        ignore=lambda d, names: ["node_modules", "test"],
        dirs_exist_ok=True,
    )


class BuildHook(BuildHookInterface):
    def initialize(self, version: str, build_data: dict[str, Any]) -> None:
        copy_js_code(self.root)


if __name__ == "__main__":
    copy_js_code(pathlib.Path("..").resolve())

@@ -0,0 +1,34 @@
[
"audio_debugger",
"blocks_essay",
"blocks_group",
"blocks_js_methods",
"blocks_layout",
"blocks_multiple_event_triggers",
"blocks_update",
"calculator",
"cancel_events",
"chatbot_multimodal",
"chatinterface_streaming_echo",
"clear_components",
"code",
"fake_gan",
"fake_diffusion_with_gif",
"file_explorer_component_events",
"image_mod_default_image",
"image_editor_events",
"image_segmentation",
"interface_random_slider",
"kitchen_sink",
"kitchen_sink_random",
"matrix_transpose",
"mini_leaderboard",
"model3D",
"native_plots",
"reverse_audio",
"stt_or_tts",
"stream_audio",
"stream_frames",
"video_component",
"zip_files"
]

@@ -0,0 +1,164 @@
import globals from "globals";
import ts_plugin from "@typescript-eslint/eslint-plugin";
import js_plugin from "@eslint/js";
import jsdoc from "eslint-plugin-jsdoc";
import typescriptParser from "@typescript-eslint/parser";
import sveltePlugin from "eslint-plugin-svelte";
import svelteParser from "svelte-eslint-parser";
const ts_rules_disabled = Object.fromEntries(
Object.keys(ts_plugin.rules).map((rule) => [
`@typescript-eslint/${rule}`,
"off"
])
);
const js_rules_disabled = Object.fromEntries(
Object.keys(js_plugin.configs.all.rules).map((rule) => [rule, "off"])
);
const jsdoc_rules_disabled = Object.fromEntries(
Object.keys(jsdoc.configs.recommended.rules).map((rule) => [
`jsdoc/${rule}`,
"off"
])
);
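// Every rule from each plugin is first turned off wholesale; the small set of rules the
// project actually enforces is then re-enabled selectively below.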
const js_rules = {
...js_rules_disabled,
// "no-console": ["error", { allow: ["warn", "error", "debug", "info"] }],
"no-constant-condition": "error",
"no-dupe-args": "error",
"no-extra-boolean-cast": "error",
"no-unexpected-multiline": "error",
"no-unreachable": "error",
"array-callback-return": "error",
complexity: "error",
"no-else-return": "error",
"no-useless-return": "error",
"no-undef": "error"
};
const ts_rules = {
...ts_rules_disabled,
"@typescript-eslint/adjacent-overload-signatures": "error",
"@typescript-eslint/explicit-function-return-type": [
"error",
{ allowExpressions: true }
],
"@typescript-eslint/consistent-type-exports": "error",
"@typescript-eslint/ban-types": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/no-inferrable-types": "error"
};
const jsdoc_rules = {
...jsdoc_rules_disabled,
"jsdoc/require-param-description": "error",
"jsdoc/require-returns-description": "error"
};
const { browser, es2021, node } = globals;
export default [
{
ignores: [
"**/.svelte-kit/**/*",
"**/node_modules/**",
"**/dist/**",
"**/.config/*",
"**/*.spec.ts",
"**/*.test.ts",
"**/*.node-test.ts",
"js/spa/test/**/*",
"**/*vite.config.ts",
"**/_website/**/*",
"**/app/**/*",
"**/_spaces-test/**/*",
"**/preview/test/**/*",
"**/component-test/**/*",
"**/js/wasm/src/webworker/**/*"
]
},
{
files: ["**/*.js", "**/*.cjs"],
languageOptions: {
globals: {
...browser,
...es2021,
...node
}
},
plugins: {
"eslint:recommended": js_plugin,
jsdoc
},
rules: { ...js_rules, ...jsdoc_rules }
},
{
files: ["**/*.ts"],
languageOptions: {
parser: typescriptParser,
parserOptions: {
project: "./tsconfig.json",
extraFileExtensions: [".svelte"]
},
globals: {
...browser,
...es2021,
...node
}
},
plugins: {
"@typescript-eslint": ts_plugin,
"eslint:recommended": js_plugin,
jsdoc
},
rules: {
...ts_rules,
...js_rules,
...jsdoc_rules,
"no-undef": "off"
}
},
{
files: ["**/client/js/**"],
languageOptions: {
parserOptions: {
project: "./client/js/tsconfig.json"
}
}
},
{
files: ["**/*.svelte"],
languageOptions: {
parser: svelteParser,
parserOptions: {
parser: typescriptParser,
project: "./tsconfig.json",
extraFileExtensions: [".svelte"]
},
globals: {
...browser,
...es2021
}
},
plugins: {
svelte: sveltePlugin,
"@typescript-eslint": ts_plugin,
"eslint:recommended": js_plugin,
jsdoc
},
rules: {
...ts_rules,
...js_rules,
...jsdoc_rules,
...sveltePlugin.configs.recommended.rules,
"svelte/no-at-html-tags": "off",
"no-undef": "off"
}
}
];

@@ -0,0 +1,33 @@
[build-system]
requires = ["hatchling",]
build-backend = "hatchling.build"
[project]
name = "lite-builder"
description = "Python library for easily interacting with trained machine learning models"
license = "Apache-2.0"
version = "0.0.2"
requires-python = ">=3.8"
authors = [
{ name = "Abubakar Abid", email = "gradio-team@huggingface.co" },
{ name = "Ali Abid", email = "gradio-team@huggingface.co" },
{ name = "Ali Abdalla", email = "gradio-team@huggingface.co" },
{ name = "Dawood Khan", email = "gradio-team@huggingface.co" },
{ name = "Ahsen Khaliq", email = "gradio-team@huggingface.co" },
{ name = "Pete Allen", email = "gradio-team@huggingface.co" },
{ name = "Ömer Faruk Özdemir", email = "gradio-team@huggingface.co" },
{ name = "Freddy A Boulton", email = "gradio-team@huggingface.co" },
{ name = "Hannah Blair", email = "gradio-team@huggingface.co" },
]
keywords = ["machine learning", "reproducibility", "visualization"]
classifiers = [
'Development Status :: 5 - Production/Stable',
]
[tool.hatch.build]
sources = ["src"]
only-packages = true
[project.entry-points.hatch]
lite_builder = "lite_builder.hooks"

@@ -0,0 +1,5 @@
from hatchling.builders.wheel import WheelBuilder


class LiteBuilder(WheelBuilder):
    PLUGIN_NAME = 'lite'

@@ -0,0 +1,6 @@
from hatchling.plugin import hookimpl
from .builder import LiteBuilder


@hookimpl
def hatch_register_builder():
    return LiteBuilder

@@ -0,0 +1,41 @@
import { defineConfig, devices } from "@playwright/experimental-ct-svelte";
import config from "./basevite.config";
/**
* See https://playwright.dev/docs/test-configuration.
*/
export default defineConfig({
testDir: "../",
/* The base directory, relative to the config file, for snapshot files created with toMatchSnapshot and toHaveScreenshot. */
snapshotDir: "./__snapshots__",
/* Maximum time one test can run for. */
timeout: 10 * 1000,
/* Run tests in files in parallel */
fullyParallel: true,
/* Fail the build on CI if you accidentally left test.only in the source code. */
forbidOnly: !!process.env.CI,
/* Retry on CI only */
retries: process.env.CI ? 2 : 0,
/* Opt out of parallel tests on CI. */
workers: process.env.CI ? 1 : undefined,
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
reporter: "html",
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
use: {
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
trace: "on-first-retry",
/* Port to use for Playwright component endpoint. */
ctPort: 3100,
ctViteConfig: config({ mode: "development", command: "build" })
},
testMatch: "*.component.spec.ts",
/* Configure projects for major browsers */
projects: [
{
name: "chromium",
use: { ...devices["Desktop Chrome"] }
}
]
});

@@ -0,0 +1,184 @@
import { spawn } from "node:child_process";
import { join, basename } from "path";
import { fileURLToPath } from "url";
import { readdirSync, writeFileSync } from "fs";
import net from "net";
import kl from "kleur";
const __dirname = fileURLToPath(new URL(".", import.meta.url));
const TEST_APP_PATH = join(__dirname, "./test.py");
const TEST_FILES_PATH = join(__dirname, "..", "js", "spa", "test");
const ROOT = join(__dirname, "..");
const test_files = readdirSync(TEST_FILES_PATH)
.filter(
(f) =>
f.endsWith("spec.ts") &&
!f.endsWith(".skip.spec.ts") &&
!f.endsWith(".component.spec.ts") &&
!f.endsWith(".reload.spec.ts")
)
.map((f) => ({
module_name: `${basename(f, ".spec.ts")}.run`,
dir_name: basename(f, ".spec.ts")
}));
export default async function global_setup() {
const verbose = process.env.GRADIO_TEST_VERBOSE;
const port = await find_free_port(7860, 8860);
process.env.GRADIO_E2E_TEST_PORT = port;
process.stdout.write(kl.yellow("\nCreating test gradio app.\n\n"));
const test_cases = [];
// check if there is a testcase file in the same directory as the test file
// if there is, append that to the file
test_files.forEach((value) => {
const test_case_dir = join(ROOT, "demo", value.dir_name);
readdirSync(test_case_dir)
.filter((f) => f.endsWith("_testcase.py"))
.forEach((f) => {
test_cases.push({
module_name: `${value.dir_name}.${basename(f, ".py")}`,
dir_name: `${value.dir_name}_${basename(f, ".py")}`
});
});
});
const all_test_files = test_files.concat(test_cases);
const test_app = make_app(all_test_files, port);
process.stdout.write(kl.yellow("App created. Starting test server.\n\n"));
process.stdout.write(kl.bgBlue(" =========================== \n"));
process.stdout.write(kl.bgBlue(" === PYTHON STARTUP LOGS === \n"));
process.stdout.write(kl.bgBlue(" =========================== \n\n"));
writeFileSync(TEST_APP_PATH, test_app);
const app = await spawn_gradio_app(TEST_APP_PATH, port, verbose);
process.stdout.write(
kl.green(`\n\nServer started. Running tests on port ${port}.\n`)
);
return () => {
process.stdout.write(kl.green(`\nTests complete, cleaning up!\n`));
kill_process(app);
};
}
const INFO_RE = /^INFO:/;
function spawn_gradio_app(app, port, verbose) {
const PORT_RE = new RegExp(`:${port}`);
return new Promise((res, rej) => {
const _process = spawn(`python`, [app], {
shell: true,
stdio: "pipe",
cwd: ROOT,
env: {
...process.env,
PYTHONUNBUFFERED: "true",
GRADIO_ANALYTICS_ENABLED: "False",
GRADIO_IS_E2E_TEST: "1",
GRADIO_RESET_EXAMPLES_CACHE: "True"
}
});
_process.stdout.setEncoding("utf8");
function std_out(data) {
const _data = data.toString();
const is_info = INFO_RE.test(_data);
if (is_info) {
process.stdout.write(kl.yellow(_data));
}
if (!is_info) {
process.stdout.write(`${_data}\n`);
}
if (PORT_RE.test(_data)) {
process.stdout.write(kl.bgBlue("\n =========== END =========== "));
res(_process);
if (!verbose) {
_process.stdout.off("data", std_out);
_process.stderr.off("data", std_out);
}
}
}
_process.stdout.on("data", std_out);
_process.stderr.on("data", std_out);
_process.on("exit", () => kill_process(_process));
_process.on("close", () => kill_process(_process));
_process.on("disconnect", () => kill_process(_process));
});
}
function kill_process(process) {
process.kill("SIGKILL");
}
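// Build the source of a single FastAPI app that mounts every demo under its own path,
// so one Python server can back all of the e2e tests.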
function make_app(demos, port) {
return `
import uvicorn
from fastapi import FastAPI
import gradio as gr
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
${demos.map((obj) => `from demo.${obj.module_name} import demo as ${obj.dir_name}`).join("\n")}
app = FastAPI()
${demos
.map(
(obj) =>
`app = gr.mount_gradio_app(app, ${obj.dir_name}, path="/${obj.dir_name}", max_file_size=${
obj.dir_name == "upload_file_limit_test" ? "'15kb'" : "None"
})`
)
.join("\n")}
config = uvicorn.Config(app, port=${port}, log_level="info")
server = uvicorn.Server(config=config)
server.run()`;
}
export async function find_free_port(start_port, end_port) {
for (let port = start_port; port < end_port; port++) {
if (await is_free_port(port)) {
return port;
}
}
throw new Error(
`Could not find free ports: there were not enough ports available.`
);
}
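// A port is considered free if a TCP connection attempt is refused (ECONNREFUSED);
// any other error is surfaced to the caller.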
export function is_free_port(port) {
return new Promise((accept, reject) => {
const sock = net.createConnection(port, "127.0.0.1");
sock.once("connect", () => {
sock.end();
accept(false);
});
sock.once("error", (e) => {
sock.destroy();
if (e.code === "ECONNREFUSED") {
accept(true);
} else {
reject(e);
}
});
});
}

@@ -0,0 +1,75 @@
import { defineConfig, devices } from "@playwright/test";
const base = defineConfig({
use: {
screenshot: "only-on-failure",
trace: "retain-on-failure",
bypassCSP: true,
launchOptions: {
args: [
"--disable-web-security",
"--use-fake-device-for-media-stream",
"--use-fake-ui-for-media-stream",
"--use-file-for-fake-audio-capture=../gradio/test_data/test_audio.wav"
]
}
},
expect: { timeout: 10000 },
timeout: 30000,
testMatch: /.*\.spec\.ts/,
testDir: "..",
workers: process.env.CI ? 1 : undefined,
retries: 3
});
const normal = defineConfig(base, {
globalSetup: process.env.CUSTOM_TEST ? undefined : "./playwright-setup.js",
projects: [
{
name: "firefox",
use: { ...devices["Desktop Firefox"] },
testMatch: /.stream_(audio|video)_out\.spec\.ts/
},
{
name: "chrome",
use: {
...devices["Desktop Chrome"],
permissions: ["clipboard-read", "clipboard-write", "microphone"]
},
testIgnore: /.stream_(audio|video)_out\.spec\.ts/
}
]
});
const lite = defineConfig(base, {
webServer: {
command: "python -m http.server 8000 --directory ../js/lite",
url: "http://localhost:8000/",
reuseExistingServer: !process.env.CI
},
testMatch: [
// "**/file_component_events.spec.ts",
"**/kitchen_sink.spec.ts",
"**/gallery_component_events.spec.ts",
"**/image_remote_url.spec.ts" // To detect the bugs on Lite fixed in https://github.com/gradio-app/gradio/pull/8011 and https://github.com/gradio-app/gradio/pull/8026
// "**/outbreak_forecast.spec.ts" // To test matplotlib on Lite
],
workers: 1,
retries: 3,
timeout: 60000,
projects: [
{
name: "chromium",
use: { ...devices["Desktop Chrome"] }
},
process.env.CI
? undefined // There are Firefox-specific issues such as https://github.com/gradio-app/gradio/pull/9528 so we want to run the tests on Firefox, but Firefox sometimes fails to start in the GitHub Actions environment so we disable it on CI.
: {
name: "firefox",
use: { ...devices["Desktop Firefox"] },
testIgnore: "**/kitchen_sink.*" // This test requires the camera permission but it's not supported on FireFox: https://github.com/microsoft/playwright/issues/11714
}
].filter(Boolean)
});
export default !!process.env.GRADIO_E2E_TEST_LITE ? lite : normal;

@@ -0,0 +1,12 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Testing Page</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="./index.ts"></script>
</body>
</html>

@@ -0,0 +1,2 @@
// Import styles, initialize component theme here.
// import '../src/common.css';

@@ -0,0 +1,8 @@
const tailwindcss = require("tailwindcss");
const autoprefixer = require("autoprefixer");
const nested = require("tailwindcss/nesting");
const tw_config = require("./tailwind.config.cjs");
module.exports = {
plugins: [nested, tailwindcss(tw_config), autoprefixer]
};

@@ -0,0 +1,8 @@
import type { TestingLibraryMatchers } from "@testing-library/jest-dom/matchers";
import "@testing-library/jest-dom/vitest";
declare module "vitest" {
interface Assertion<T = any>
extends jest.Matchers<void, T>,
TestingLibraryMatchers<T, void> {}
}

@@ -0,0 +1,5 @@
import { vitePreprocess } from "@sveltejs/vite-plugin-svelte";
export default {
preprocess: vitePreprocess()
};

@@ -0,0 +1,12 @@
module.exports = {
content: [
"./src/**/*.{html,js,svelte,ts}",
"**/@gradio/**/*.{html,js,svelte,ts}"
],
theme: {
extend: {}
},
plugins: [require("@tailwindcss/forms")]
};

@@ -0,0 +1,3 @@
import config from "../js/spa/vite.config";
export default config;

@@ -0,0 +1,43 @@
// See https://containers.dev
{
"name": "Python 3",
"image": "mcr.microsoft.com/devcontainers/python:1-3.10",
// See https://containers.dev/features
"features": {
"ghcr.io/devcontainers/features/git:1": {},
"ghcr.io/devcontainers/features/node:1": {
"pnpmVersion": "9"
},
"ghcr.io/devcontainers-contrib/features/ffmpeg-apt-get:1": {}
},
"hostRequirements": {
"cpus": 4,
"memory": "8gb",
"storage": "32gb"
},
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.black-formatter",
"ms-toolsai.jupyter",
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"phoenisx.cssvar"
],
"remote.autoForwardPorts": false
}
},
"forwardPorts": [7860, 9876],
"portsAttributes": {
"7860": { "label": "gradio port" },
"9876": { "label": "gradio dev port" }
},
"postCreateCommand": "export NODE_OPTIONS=\"--max-old-space-size=8192\" && chmod +x scripts/install_gradio.sh scripts/install_test_requirements.sh scripts/build_frontend.sh && ./scripts/install_test_requirements.sh && ./scripts/install_gradio.sh && ./scripts/build_frontend.sh"
}

@@ -0,0 +1,41 @@
# Python build
.eggs/
gradio.egg-info/*
!gradio.egg-info/requires.txt
!gradio.egg-info/PKG-INFO
dist/
*.pyc
__pycache__/
*.py[cod]
*$py.class
build/
# JS build
gradio/templates/frontend/static
gradio/templates/frontend/cdn
# Secrets
.env
# Gradio run artifacts
*.db
*.sqlite3
gradio/launches.json
gradio/hash_seed.txt
# Tests
.coverage
coverage.xml
test.txt
# Demos
demo/tmp.zip
demo/flagged
demo/files/*.avi
demo/files/*.mp4
# Etc
.idea/*
.DS_Store
*.bak
workspace.code-workspace

@@ -0,0 +1,8 @@
root = true
[{js/**,client/js/**}]
end_of_line = lf
insert_final_newline = true
indent_style = tab
tab_width = 2

@@ -0,0 +1,14 @@
# https://github.com/gradio-app/gradio/pull/4487 - refactor components.py to separate files
69f36f98535c904e7cac2b4942cecc747ed7443c
# Format the codebase
cc0cff893f9d7d472788adc2510c123967b384fe
# Switch from black to ruff
8a70e83db9c7751b46058cdd2514e6bddeef6210
# format (#4810)
7fa5e766ce0f89f1fb84c329e62c9df9c332120a
# lint website
4bf301324b3b180fa32166ff1774312b01334c88
# format frontend with prettier
980b9f60eb49ed81e4957debe7b23a559a4d4b51
# Refactor component directories (#5074)
1419538ea795caa391e3de809379f10639e9e764

gradio-5.35.0/.gitignore vendored Normal file
@@ -0,0 +1,100 @@
# Python build
.eggs/
gradio.egg-info
dist/
dist-lite/
*.pyc
__pycache__/
*.py[cod]
*$py.class
build/
!js/build/
!js/build/dist/
__tmp/*
*.pyi
!gradio/stubs/**/*.pyi
.ipynb_checkpoints/
.python-version
=23.2
# JS build
gradio/templates/*
gradio/node/*
gradio/_frontend_code/*
js/gradio-preview/test/*
# Secrets
.env
# Gradio run artifacts
*.db
*.sqlite3
gradio/launches.json
gradio/hash_seed.txt
.gradio/
tmp.zip
# Tests
.coverage
coverage.xml
test.txt
**/snapshots/**/*.png
playwright-report/
.hypothesis
.lite-perf.json
# Demos
demo/tmp.zip
demo/files/*.avi
demo/files/*.mp4
demo/all_demos/demos/*
demo/all_demos/requirements.txt
demo/*/config.json
demo/annotatedimage_component/*.png
demo/fake_diffusion_with_gif/*.gif
demo/cancel_events/cancel_events_output_log.txt
demo/unload_event_test/output_log.txt
demo/stream_video_out/output_*.ts
demo/stream_video_out/output_*.mp4
demo/stream_audio_out/*.mp3
#demo/image_editor_story/*.png
# Etc
.idea/*
.DS_Store
*.bak
workspace.code-workspace
*.h5
# dev containers
.pnpm-store/
# log files
.pnpm-debug.log
# Local virtualenv for devs
.venv*
# FRP
gradio/frpc_*
.vercel
# js
node_modules
public/build/
test-results
client/js/dist/*
client/js/test.js
.config/test.py
.svelte-kit
# storybook
storybook-static
build-storybook.log
js/storybook/theme.css
#js/storybook/public/output-image.png
# playwright
.config/playwright/.cache

gradio-5.35.0/.vscode/extensions.json vendored Normal file
@@ -0,0 +1,9 @@
{
"recommendations": [
"dbaeumer.vscode-eslint",
"phoenisx.cssvar",
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"charliermarsh.ruff"
]
}

gradio-5.35.0/.vscode/settings.json vendored Normal file
@@ -0,0 +1,26 @@
{
"python.formatting.provider": "none",
"cssvar.files": ["./js/node_modules/pollen-css/pollen.css"],
"cssvar.ignore": [],
"cssvar.disableSort": true,
"cssvar.extensions": ["js", "css", "html", "jsx", "tsx", "svelte"],
"python.analysis.extraPaths": ["./gradio/themes/utils"],
"svelte.plugin.svelte.format.enable": true,
"svelte.plugin.svelte.diagnostics.enable": false,
"svelte.enable-ts-plugin": true,
"prettier.configPath": ".config/.prettierrc.json",
"prettier.ignorePath": ".config/.prettierignore",
"python.analysis.typeCheckingMode": "basic",
"python.testing.pytestArgs": ["."],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"eslint.validate": ["javascript", "typescript", "html", "markdown", "svelte"],
"eslint.experimental.useFlatConfig": true,
"eslint.options": {
"overrideConfigFile": "./.config/eslint.config.js"
},
"typescript.tsdk": "node_modules/typescript/lib",
"i18n-ally.localesPaths": [
"js/spa/src/lang"
]
}

gradio-5.35.0/CHANGELOG.md Normal file (7104 lines)
File diff suppressed because it is too large

@@ -0,0 +1,45 @@
cff-version: 1.2.0
message: Please cite this project using these metadata.
title: "Gradio: Hassle-free sharing and testing of ML models in the wild"
abstract: >-
  Accessibility is a major challenge of machine learning (ML).
  Typical ML models are built by specialists and require
  specialized hardware/software as well as ML experience to
  validate. This makes it challenging for non-technical
  collaborators and endpoint users (e.g. physicians) to easily
  provide feedback on model development and to gain trust in
  ML. The accessibility challenge also makes collaboration
  more difficult and limits the ML researcher's exposure to
  realistic data and scenarios that occur in the wild. To
  improve accessibility and facilitate collaboration, we
  developed an open-source Python package, Gradio, which
  allows researchers to rapidly generate a visual interface
  for their ML models. Gradio makes accessing any ML model as
  easy as sharing a URL. Our development of Gradio is informed
  by interviews with a number of machine learning researchers
  who participate in interdisciplinary collaborations. Their
  feedback identified that Gradio should support a variety of
  interfaces and frameworks, allow for easy sharing of the
  interface, allow for input manipulation and interactive
  inference by the domain expert, as well as allow embedding
  the interface in iPython notebooks. We developed these
  features and carried out a case study to understand Gradio's
  usefulness and usability in the setting of a machine
  learning collaboration between a researcher and a
  cardiologist.
authors:
  - family-names: Abid
    given-names: Abubakar
  - family-names: Abdalla
    given-names: Ali
  - family-names: Abid
    given-names: Ali
  - family-names: Khan
    given-names: Dawood
  - family-names: Alfozan
    given-names: Abdulrahman
  - family-names: Zou
    given-names: James
doi: 10.48550/arXiv.1906.02569
date-released: 2019-06-06
url: https://arxiv.org/abs/1906.02569

@@ -0,0 +1,134 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual
identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or advances of
any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address,
without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official email address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
[Discord], or at our [Email].
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series of
actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent
ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within the
community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations
[Discord]: https://discord.com/invite/feTf9x3ZSB
[Email]: gradio-team@huggingface.co

@@ -0,0 +1,570 @@
# Contributing to Gradio
![GitHub issues by-label](https://img.shields.io/github/issues/gradio-app/gradio/good%20first%20issue?color=fe7c01&link=https%3A%2F%2Fgithub.com%2Fgradio-app%2Fgradio%2Fissues%3Fq%3Dis%253Aopen%2Bis%253Aissue%2Blabel%253A%2522good%2Bfirst%2Bissue%2522)
More than 300 awesome developers have contributed to the `gradio` library, and we'd be thrilled if you would like to be the next contributor!
**Prerequisites**:
- [Python 3.10+](https://www.python.org/downloads/)
- [Node.js v16.14+](https://nodejs.dev/en/download/package-manager/) (only needed if you are making changes to the frontend)
- [pnpm 9.x](https://pnpm.io/9.x/installation) (only needed if you are making changes to the frontend)
**Steps to Contribute**:
Generally speaking, contributing to Gradio involves four steps:
1. Identify a good issue to contribute to (such as any of the issues [tagged with "good first issue"](https://github.com/gradio-app/gradio/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22))
2. Setup Gradio locally
3. Understand the structure of the codebase & make the changes to the codebase locally
4. Open a pull request (PR) to upstream your changes to the Gradio repository
**Note:** We welcome meaningful contributions that solve real issues and improve the codebase. Please avoid opening PRs with content generated primarily by AI language models. All contributions should demonstrate clear understanding of the problem being solved and be consistent with the relevant sections of the Gradio codebase.
You can watch this short video walkthrough of how to contribute, or keep reading below:
<a href="https://www.youtube.com/watch?v=YTjwTe5Yurs&ab_channel=HuggingFace" target="_blank">
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/contributing-video-screenshot.png" style="width:100%">
</a>
## 🏡 Setup Gradio locally
There are a few ways to install and run Gradio.
### 🛠️ Install Gradio from `main`
- Clone this repo
- Navigate to the repo directory and run:
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```bash
bash scripts/install_gradio.sh
```
</td>
<td>
```bash
scripts\install_gradio.bat
```
</td>
</tr>
</table>
- Run the frontend (only required if you are making changes to the frontend and would like to preview them)
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```bash
bash scripts/run_frontend.sh
```
</td>
<td>
```bash
scripts\run_frontend.bat
```
</td>
</tr>
</table>
- Install test requirements (only required if you want to run tests locally)
(Note that it is highly recommended to use a virtual environment running **Python 3.10** since the versions of Gradio's dependencies are pinned)
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```bash
bash scripts/install_test_requirements.sh
```
</td>
<td>
```bash
scripts\install_test_requirements.bat
```
</td>
</tr>
</table>
If you have a different Python version and conflicting packages during the installation, please first run:
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```bash
bash scripts/create_test_requirements.sh
```
</td>
<td>
```bash
scripts\create_test_requirements.bat
```
</td>
</tr>
</table>
### 📦 Using dev containers
Instead of installing Gradio locally, you can alternatively use dev containers. This is supported on all platforms (macOS/Windows/Linux), as well as on GitHub Codespaces.
Prerequisites:
- An editor which supports dev containers, like VS Code
- Docker support on the host computer:
- macOS: [Docker Desktop 2.0+](https://www.docker.com/products/docker-desktop/)
- Windows: [Docker Desktop 2.0+](https://www.docker.com/products/docker-desktop/)
- Linux: [Docker CE/EE 18.06+](https://docs.docker.com/get-docker/) and [Docker Compose 1.21+](https://docs.docker.com/compose/install/)
- If using VS Code, the [Dev Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension
Steps:
- Clone repository
- Open it in your editor
- For VS Code, execute `Dev Containers: Reopen in container` command
For detailed instructions, please see the [Dev Containers tutorial](https://code.visualstudio.com/docs/devcontainers/tutorial).
## 🧱 Structure of the Repository
If you're a newcomer to Gradio, we recommend getting familiar with the overall structure of the repository so that you can focus on the part of the source code you'd like to contribute to.
- `/gradio`: contains the Python source code for the library
- `/gradio/interface.py`: contains the Python source code for the core `Interface` class
- `/gradio/blocks.py`: contains the Python source code for the core `Blocks` class
- `/gradio/components/`: the directory that contains the Python source code for all of the Gradio components.
- `/test`: contains Python unit tests for the library
- `/js`: contains the HTML/JS/CSS source code for the library, including the frontend code for each component in a separate directory
- `/js/_website`: contains the code for the Gradio website (www.gradio.app). See the README in the `/js/_website` folder for more details
- `/guides`: the written guides and tutorials that are found on Gradio's website.
## 🚀 Run a Gradio app
You can get started by creating an `app.py` file in the root:
```py
import gradio as gr
with gr.Blocks() as demo:
gr.Button()
if __name__ == "__main__":
demo.launch()
```
then run:
```
gradio app.py
```
This will start the backend server in reload mode, which will watch for changes in the `gradio` folder and reload the app if changes are made. By default, Gradio will launch on port 7860. You can also just use `python app.py`, but this won't automatically trigger updates.
Note: if you have `gradio` installed elsewhere in your system, you may need to uninstall it or at least make sure your `PYTHONPATH` includes the directory where the Gradio repository is cloned, e.g.,
`export PYTHONPATH="./"`
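If port 7860 is already taken, you can ask for a different one when launching. A minimal sketch, assuming `launch()` accepts a `server_port` argument (the port number here is arbitrary):
```py
import gradio as gr

with gr.Blocks() as demo:
    gr.Button()

if __name__ == "__main__":
    # server_port is assumed here; 7861 is just an example value.
    demo.launch(server_port=7861)
```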
If you're making frontend changes, start the frontend server:
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```bash
bash scripts/run_frontend.sh
```
</td>
<td>
```bash
scripts\run_frontend.bat
```
</td>
</tr>
</table>
This will open a separate browser tab. By default, Gradio will launch this on port 9876. Any changes to the frontend will also reload automatically in the browser. For more information about developing in the frontend, you can refer to [js/README.md](js/README.md).
We also have demos of all our components in the `/demo` directory. To get our simple gradio Chatbot running locally:
```
gradio demo/chatbot_simple/run.py
```
## 🧪 Testing
We use Pytest, Playwright and Vitest to test our code.
- The Python tests are located in `/test`. To run these tests:
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```
bash scripts/run_backend_tests.sh
```
</td>
<td>
```bash
scripts\run_backend_tests.bat
```
</td>
</tr>
</table>
- The frontend unit tests are any defined with the filename `*.test.ts`. To run them:
```
pnpm test
```
- Browser tests are located in `js/spa/test` and are defined as `*spec.ts` files.
To install browser test dependencies:
```
pip install -r demo/outbreak_forecast/requirements.txt
pip install -r demo/stream_video_out/requirements.txt
pnpm exec playwright install chromium firefox
pnpm exec playwright install-deps chromium firefox
pnpm --filter @gradio/utils --filter @gradio/theme package
```
To run browser tests:
```
pnpm test:browser
```
To build the frontend code before running browser tests:
```
pnpm test:browser:full
```
You can also run browser tests in the UI mode by adding the `--ui` flag:
```
pnpm test:browser --ui
```
If you have made any significant visual changes to a component, we encourage you to add a new Storybook story or amend an existing one to reflect them. You can create a new story with a `*.stories.svelte` file. You can run the storybook locally:
```
pnpm storybook
```
## ✍️ Gradio Website & Docs
We also welcome any contributions to our [website](https://www.gradio.app) and [docs](https://www.gradio.app/docs).
### Building The Website
All of the website code lives in the `js/_website/` directory.
To start the website on dev mode simply cd into this directory and run:
```
pnpm i
pnpm dev
```
This will serve the website on `http://localhost:5173/` (or the next available port).
When you're done with changes and want to build the website you can run:
```
pnpm build && pnpm preview
```
This will serve the website on `http://localhost:4173/` (or the next available port).
### Documentation
#### API Reference
Gradio's [API reference](https://www.gradio.app/docs/gradio/interface) is built from templates written in [mdsvex](https://mdsvex.pngwn.io/). You can find all the templates in this directory:
```
js/_website/src/lib/templates/gradio
```
The templates directory is structured as follows:
```
├── gradio/
│ ├── 01_building-demos/
│ │ ├── 01_interface.svx
│ │ ├── 02_chatinterface.svx
│ │ ├── 03_tabbedinterface.svx
│ │ ├── 04_blocks.svx
│ ├── 02_blocks-layout/
│ ├── 03_components/
│ ├── 04_helpers/
│ ├── 05_modals/
│ ├── 06_routes/
│ ├── other/
```
This structure defines the pages' ordering. You can use a numeral prefix (XX_) before a name to dictate where a page is listed, but it's otherwise ignored in the url route. Note that the folder names (01_building-demos, etc) are only used for the navbar and are not in the url.
The mdsvex files use a combination of markdown and svelte. They also pull documentation directly from the source code. Adding a `@document()` wrapper around any class or function in the source code will make its docstrings available in the templates.
Here's an example: the template for [Image docs](https://www.gradio.app/docs/gradio/image) is [here](https://github.com/gradio-app/gradio/blob/main/js/_website/src/lib/templates/gradio/03_components/image.svx). You can see the initialization section references `obj.parameters`. So to edit the description of a parameter you'll have to edit the docstring in the [source code](https://github.com/gradio-app/gradio/blob/main/gradio/components/image.py). But the page also includes a section titled 'GIF and SVG Image Formats' which is written in plain markdown and can be edited directly on the template.
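To make this concrete, here is a small, hypothetical sketch of the pattern (the import path of `document` and the docstring layout are assumptions; check existing components for the exact conventions used in the codebase):
```py
# Hypothetical example: the import path below is an assumption.
from gradio.documentation import document

@document()
def flip_text(text: str) -> str:
    """
    Reverses a string. This docstring, including the parameter
    descriptions below, is what becomes available to the templates.
    Parameters:
        text: the string to reverse.
    Returns:
        the reversed string.
    """
    return text[::-1]
```
The template can then pull the documented object's parameters (for example via `obj.parameters`, as in the Image template above).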
If you are making changes to docstrings and want to see them on the website you have to make sure you're on an editable install of the gradio library. Just run this command from root:
```
pip install -e .
```
And then from the website directory:
```
pnpm dev
```
#### Guides
Guides like [Quickstart](https://www.gradio.app/guides/quickstart) are built from this directory: `/guides`. The directory follows the same structure as the API reference templates, with nested folders and numerical prefixes for ordering, but the files are standard markdown files. After adding a new guide, or editing an existing one, to see the changes on the website make sure you are on an editable install of the gradio library. Run this command from root:
```
pip install -e .
```
and then from the website directory:
```
pnpm dev
```
#### Main vs. Released
The website supports documentation for both the latest released version on pypi as well as the main build on github. You can switch between them on the website by using the toggle on any page or by prefixing '/main' before the route in the url. For example: https://www.gradio.app/main/guides/quickstart
If you're making changes to documentation and are wondering why they're not showing up, make sure you're looking at the 'main' version of the page. Since they haven't been included in a release yet, they will only be visible there.
## 🌎 Gradio-Lite
Gradio-Lite is a Pyodide-based library that lets you run Gradio serverless (in other words, directly in your browser).
You can start the development server by running:
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```
bash scripts/run_lite.sh
```
</td>
<td>
```bash
scripts\run_lite.bat
```
</td>
</tr>
</table>
If you make changes to the Python code during development, you will need to rebuild the Python packages loaded into Gradio-Lite. To do this, run:
```
pnpm --filter @gradio/lite pybuild
```
To generate the release build, run:
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```
bash scripts/build_lite.sh
```
</td>
<td>
```bash
scripts\build_lite.bat
```
</td>
</tr>
</table>
The release build will be located in the `dist` directory in the `js/lite` project.
To test it, you can run a local server in the `js/lite` directory:
```
python -m http.server --directory js/lite
```
and navigate to `http://localhost:8000` in your browser. The demo page `index.html` located in the `js/lite` directory will be loaded.
## 📮 Submitting PRs
All PRs should be submitted against `main`, and ideally should address an open issue, unless the change is small. Direct commits to main are blocked, and PRs require an approving review to merge into main. By convention, the Gradio maintainers will review PRs when:
- An initial review has been requested
- A clear, descriptive title has been assigned to the PR
- A maintainer (@abidlabs, @aliabid94, @aliabd, @AK391, @dawoodkhan82, @pngwn, @freddyaboulton, @hannahblair, @hysts, @whitphx) is tagged in the PR comments and asked to complete a review
🧹 We ask that you make sure initial CI checks are passing before requesting a review. One of the Gradio maintainers will merge the PR when all the checks are passing. You can safely ignore the Vercel and Spaces checks, which only run under maintainers' pull requests.
Don't forget to format your code before pushing:
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```
bash scripts/format_backend.sh
```
</td>
<td>
```bash
scripts\format_backend.bat
```
</td>
</tr>
</table>
And if you made changes to the frontend:
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```
bash scripts/format_frontend.sh
```
</td>
<td>
```bash
scripts\format_frontend.bat
```
</td>
</tr>
</table>
Thank you for taking the time to contribute to Gradio!
## ❓ Need help getting started?
- Browse [issues](https://github.com/gradio-app/gradio/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) with the "good first issue" label. These are issues we think are good for newcomers.
- Ask the Gradio community in our [Discord](https://discord.com/invite/feTf9x3ZSB)
- Raise an issue for a feature or a bug you want to tackle
## 🚧 Troubleshooting
`ERROR: Error loading ASGI app. Could not import module "<filename>"`
Verify that you've used the correct filename for your Gradio app and that you're running the command from the directory containing that file (for example, run `gradio app.py` from the folder where `app.py` lives).
---
```ERR_PNPM_RECURSIVE_RUN_FIRST_FAIL @self/spa@1.0.0 build:local: vite build --mode production:local --emptyOutDir "--emptyOutDir"```
Delete `/node_modules` and `pnpm-lock.yaml`:
```
rm -rf node_modules/
rm pnpm-lock.yaml
```
and run the install scripts:
<table>
<tr>
<th>MacOS / Linux</th>
<th>Windows</th>
</tr>
<tr>
<td>
```
bash scripts/install_gradio.sh
bash scripts/build_frontend.sh
```
</td>
<td>
```bash
scripts\install_gradio.bat
scripts\build_frontend.bat
```
</td>
</tr>
</table>
---
```FATAL ERROR: Ineffective mark-compacts near heap limit Allocation failed - JavaScript heap out of memory``` when running `scripts/build_frontend.sh`.
Run `scripts/build_frontend.sh` with the environment variable `NODE_OPTIONS=--max_old_space_size=2048` to increase the heap size, e.g. `NODE_OPTIONS=--max_old_space_size=2048 bash scripts/build_frontend.sh`.
---
In the case of:
- Unexpected exceptions being thrown, or
- The following warning:
`IMPORTANT: You are using gradio version <earlier version>, however version <later version> is available, please upgrade.`
ensure your `PYTHONPATH` includes the directory where the Gradio repository is cloned, e.g.:
```export PYTHONPATH="./"```
This ensures that when `gradio` is imported in a Python program, it is the current version from this repository.
---
_Could these guidelines be clearer? Feel free to open a PR to help us facilitate open-source contributions!_

gradio-5.35.0/LICENSE Normal file
@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

gradio-5.35.0/README.md Normal file
@ -0,0 +1,214 @@
<!-- DO NOT EDIT THIS FILE DIRECTLY. INSTEAD EDIT THE `readme_template.md` OR `guides/01_getting-started/01_quickstart.md` TEMPLATES AND THEN RUN `render_readme.py` SCRIPT. -->
<div align="center">
<a href="https://gradio.app">
<img src="readme_files/gradio.svg" alt="gradio" width=350>
</a>
</div>
<div align="center">
<span>
<a href="https://www.producthunt.com/posts/gradio-5-0?embed=true&utm_source=badge-featured&utm_medium=badge&utm_souce=badge-gradio&#0045;5&#0045;0" target="_blank"><img src="https://api.producthunt.com/widgets/embed-image/v1/featured.svg?post_id=501906&theme=light" alt="Gradio&#0032;5&#0046;0 - the&#0032;easiest&#0032;way&#0032;to&#0032;build&#0032;AI&#0032;web&#0032;apps | Product Hunt" style="width: 150px; height: 54px;" width="150" height="54" /></a>
<a href="https://trendshift.io/repositories/2145" target="_blank"><img src="https://trendshift.io/api/badge/repositories/2145" alt="gradio-app%2Fgradio | Trendshift" style="width: 150px; height: 55px;" width="150" height="55"/></a>
</span>
[![gradio-backend](https://github.com/gradio-app/gradio/actions/workflows/test-python.yml/badge.svg)](https://github.com/gradio-app/gradio/actions/workflows/test-python.yml)
[![gradio-ui](https://github.com/gradio-app/gradio/actions/workflows/tests-js.yml/badge.svg)](https://github.com/gradio-app/gradio/actions/workflows/tests-js.yml)
[![PyPI](https://img.shields.io/pypi/v/gradio)](https://pypi.org/project/gradio/)
[![PyPI downloads](https://img.shields.io/pypi/dm/gradio)](https://pypi.org/project/gradio/)
![Python version](https://img.shields.io/badge/python-3.10+-important)
[![Twitter follow](https://img.shields.io/twitter/follow/gradio?style=social&label=follow)](https://twitter.com/gradio)
[Website](https://gradio.app)
| [Documentation](https://gradio.app/docs/)
| [Guides](https://gradio.app/guides/)
| [Getting Started](https://gradio.app/getting_started/)
| [Examples](demo/)
</div>
<div align="center">
English | [中文](readme_files/zh-cn#readme)
</div>
# Gradio: Build Machine Learning Web Apps — in Python
Gradio is an open-source Python package that allows you to quickly **build** a demo or web application for your machine learning model, API, or any arbitrary Python function. You can then **share** a link to your demo or web application in just a few seconds using Gradio's built-in sharing features. *No JavaScript, CSS, or web hosting experience needed!*
<img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/gif-version.gif" style="padding-bottom: 10px">
It just takes a few lines of Python to create your own demo, so let's get started 💫
### Installation
**Prerequisite**: Gradio requires [Python 3.10 or higher](https://www.python.org/downloads/).
We recommend installing Gradio using `pip`, which is included by default in Python. Run this in your terminal or command prompt:
```bash
pip install --upgrade gradio
```
> [!TIP]
> It is best to install Gradio in a virtual environment. Detailed installation instructions for all common operating systems <a href="https://www.gradio.app/main/guides/installing-gradio-in-a-virtual-environment">are provided here</a>.
### Building Your First Demo
You can run Gradio in your favorite code editor, Jupyter notebook, Google Colab, or anywhere else you write Python. Let's write your first Gradio app:
```python
import gradio as gr
def greet(name, intensity):
return "Hello, " + name + "!" * int(intensity)
demo = gr.Interface(
fn=greet,
inputs=["text", "slider"],
outputs=["text"],
)
demo.launch()
```
> [!TIP]
> We shorten the imported name from <code>gradio</code> to <code>gr</code>. This is a widely adopted convention for better readability of code.
Now, run your code. If you've written the Python code in a file named `app.py`, then you would run `python app.py` from the terminal.
The demo below will open in a browser on [http://localhost:7860](http://localhost:7860) if running from a file. If you are running within a notebook, the demo will appear embedded within the notebook.
![`hello_world_4` demo](demo/hello_world_4/screenshot.gif)
Type your name in the textbox on the left, drag the slider, and then press the Submit button. You should see a friendly greeting on the right.
> [!TIP]
> When developing locally, you can run your Gradio app in <strong>hot reload mode</strong>, which automatically reloads the Gradio app whenever you make changes to the file. To do this, simply type in <code>gradio</code> before the name of the file instead of <code>python</code>. In the example above, you would type: `gradio app.py` in your terminal. Learn more in the <a href="https://www.gradio.app/guides/developing-faster-with-reload-mode">Hot Reloading Guide</a>.
**Understanding the `Interface` Class**
You'll notice that in order to make your first demo, you created an instance of the `gr.Interface` class. The `Interface` class is designed to create demos for machine learning models which accept one or more inputs, and return one or more outputs.
The `Interface` class has three core arguments:
- `fn`: the function to wrap a user interface (UI) around
- `inputs`: the Gradio component(s) to use for the input. The number of components should match the number of arguments in your function.
- `outputs`: the Gradio component(s) to use for the output. The number of components should match the number of return values from your function.
The `fn` argument is very flexible -- you can pass *any* Python function that you want to wrap with a UI. In the example above, we saw a relatively simple function, but the function could be anything from a music generator to a tax calculator to the prediction function of a pretrained machine learning model.
The `inputs` and `outputs` arguments take one or more Gradio components. As we'll see, Gradio includes more than [30 built-in components](https://www.gradio.app/docs/gradio/introduction) (such as the `gr.Textbox()`, `gr.Image()`, and `gr.HTML()` components) that are designed for machine learning applications.
> [!TIP]
> For the `inputs` and `outputs` arguments, you can pass in the name of these components as a string (`"textbox"`) or an instance of the class (`gr.Textbox()`).
If your function accepts more than one argument, as is the case above, pass a list of input components to `inputs`, with each input component corresponding to one of the arguments of the function, in order. The same holds true if your function returns more than one value: simply pass in a list of components to `outputs`. This flexibility makes the `Interface` class a very powerful way to create demos.
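For instance, here is a small sketch of an `Interface` with one input and two outputs (the function and component labels are just illustrative):
```python
import gradio as gr

def analyze(sentence):
    # One input argument, two return values -> two output components, matched in order
    return len(sentence.split()), sentence.upper()

demo = gr.Interface(
    fn=analyze,
    inputs=gr.Textbox(label="Sentence"),
    outputs=[gr.Number(label="Word count"), gr.Textbox(label="Uppercase")],
)

demo.launch()
```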
We'll dive deeper into the `gr.Interface` on our series on [building Interfaces](https://www.gradio.app/main/guides/the-interface-class).
### Sharing Your Demo
What good is a beautiful demo if you can't share it? Gradio lets you easily share a machine learning demo without having to worry about the hassle of hosting on a web server. Simply set `share=True` in `launch()`, and a publicly accessible URL will be created for your demo. Let's revisit our example demo, but change the last line as follows:
```python
import gradio as gr
def greet(name):
return "Hello " + name + "!"
demo = gr.Interface(fn=greet, inputs="textbox", outputs="textbox")
demo.launch(share=True) # Share your demo with just 1 extra parameter 🚀
```
When you run this code, a public URL will be generated for your demo in a matter of seconds, something like:
👉 &nbsp; `https://a23dsf231adb.gradio.live`
Now, anyone around the world can try your Gradio demo from their browser, while the machine learning model and all computation continue to run locally on your computer.
To learn more about sharing your demo, read our dedicated guide on [sharing your Gradio application](https://www.gradio.app/guides/sharing-your-app).
### An Overview of Gradio
So far, we've been discussing the `Interface` class, which is a high-level class that lets you build demos quickly with Gradio. But what else does Gradio include?
#### Custom Demos with `gr.Blocks`
Gradio offers a low-level approach for designing web apps with more customizable layouts and data flows with the `gr.Blocks` class. Blocks supports things like controlling where components appear on the page, handling multiple data flows and more complex interactions (e.g. outputs can serve as inputs to other functions), and updating properties/visibility of components based on user interaction — still all in Python.
You can build very custom and complex applications using `gr.Blocks()`. For example, the popular image generation [Automatic1111 Web UI](https://github.com/AUTOMATIC1111/stable-diffusion-webui) is built using Gradio Blocks. We dive deeper into the `gr.Blocks` on our series on [building with Blocks](https://www.gradio.app/guides/blocks-and-event-listeners).
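As a quick taste, here is a minimal sketch of a `gr.Blocks` app (the layout and event wiring are illustrative, not a full application):
```python
import gradio as gr

with gr.Blocks() as demo:
    with gr.Row():  # explicit control over where components appear
        name = gr.Textbox(label="Name")
        greeting = gr.Textbox(label="Greeting")
    greet_btn = gr.Button("Greet")
    # An event listener wires the button to a function; its output could in turn feed other events
    greet_btn.click(fn=lambda n: f"Hello, {n}!", inputs=name, outputs=greeting)

demo.launch()
```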
#### Chatbots with `gr.ChatInterface`
Gradio includes another high-level class, `gr.ChatInterface`, which is specifically designed to create Chatbot UIs. Similar to `Interface`, you supply a function and Gradio creates a fully working Chatbot UI. If you're interested in creating a chatbot, you can jump straight to [our dedicated guide on `gr.ChatInterface`](https://www.gradio.app/guides/creating-a-chatbot-fast).
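A minimal sketch, where the echo function stands in for your own chat model:
```python
import gradio as gr

def respond(message, history):
    # `message` is the latest user message; `history` holds the prior conversation
    return "You said: " + message

demo = gr.ChatInterface(fn=respond, type="messages")
demo.launch()
```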
#### The Gradio Python & JavaScript Ecosystem
That's the gist of the core `gradio` Python library, but Gradio is actually so much more! It's an entire ecosystem of Python and JavaScript libraries that let you build machine learning applications, or query them programmatically, in Python or JavaScript. Here are other related parts of the Gradio ecosystem:
* [Gradio Python Client](https://www.gradio.app/guides/getting-started-with-the-python-client) (`gradio_client`): query any Gradio app programmatically in Python.
* [Gradio JavaScript Client](https://www.gradio.app/guides/getting-started-with-the-js-client) (`@gradio/client`): query any Gradio app programmatically in JavaScript.
* [Gradio-Lite](https://www.gradio.app/guides/gradio-lite) (`@gradio/lite`): write Gradio apps in Python that run entirely in the browser (no server needed!), thanks to Pyodide.
* [Hugging Face Spaces](https://huggingface.co/spaces): the most popular place to host Gradio applications — for free!
### What's Next?
Keep learning about Gradio sequentially using the Gradio Guides, which include explanations as well as example code and embedded interactive demos. Next up: [let's dive deeper into the Interface class](https://www.gradio.app/guides/the-interface-class).
Or, if you already know the basics and are looking for something specific, you can search the more [technical API documentation](https://www.gradio.app/docs/).
### Gradio Sketch
You can also build Gradio applications without writing any code. Simply type `gradio sketch` into your terminal to open up an editor that lets you define and modify Gradio components, adjust their layouts, and add events, all through a web editor. Or [use this hosted version of Gradio Sketch, running on Hugging Face Spaces](https://huggingface.co/spaces/aliabid94/Sketch).
## Questions?
If you'd like to report a bug or have a feature request, please create an [issue on GitHub](https://github.com/gradio-app/gradio/issues/new/choose). For general questions about usage, we are available on [our Discord server](https://discord.com/invite/feTf9x3ZSB) and happy to help.
If you like Gradio, please leave us a ⭐ on GitHub!
## Open Source Stack
Gradio is built on top of many wonderful open-source libraries!
[<img src="readme_files/huggingface_mini.svg" alt="huggingface" height=40>](https://huggingface.co)
[<img src="readme_files/python.svg" alt="python" height=40>](https://www.python.org)
[<img src="readme_files/fastapi.svg" alt="fastapi" height=40>](https://fastapi.tiangolo.com)
[<img src="readme_files/encode.svg" alt="encode" height=40>](https://www.encode.io)
[<img src="readme_files/svelte.svg" alt="svelte" height=40>](https://svelte.dev)
[<img src="readme_files/vite.svg" alt="vite" height=40>](https://vitejs.dev)
[<img src="readme_files/pnpm.svg" alt="pnpm" height=40>](https://pnpm.io)
[<img src="readme_files/tailwind.svg" alt="tailwind" height=40>](https://tailwindcss.com)
[<img src="readme_files/storybook.svg" alt="storybook" height=40>](https://storybook.js.org/)
[<img src="readme_files/chromatic.svg" alt="chromatic" height=40>](https://www.chromatic.com/)
## License
Gradio is licensed under the Apache License 2.0 found in the [LICENSE](LICENSE) file in the root directory of this repository.
## Citation
Also check out the paper _[Gradio: Hassle-Free Sharing and Testing of ML Models in the Wild](https://arxiv.org/abs/1906.02569), ICML HILL 2019_, and please cite it if you use Gradio in your work.
```
@article{abid2019gradio,
title = {Gradio: Hassle-Free Sharing and Testing of ML Models in the Wild},
author = {Abid, Abubakar and Abdalla, Ali and Abid, Ali and Khan, Dawood and Alfozan, Abdulrahman and Zou, James},
journal = {arXiv preprint arXiv:1906.02569},
year = {2019},
}
```

@ -0,0 +1,5 @@
# Security Policy
## Reporting a Vulnerability
If you discover a security vulnerability, we would be very grateful if you could email us at gradio-team@huggingface.co. This is the preferred approach instead of opening a public issue. We take all vulnerability reports seriously and will work to patch them immediately. Whenever possible, we will credit the person or people who reported a vulnerability after it has been patched.

gradio-5.35.0/build_pypi.sh Executable file
@ -0,0 +1,18 @@
#!/bin/bash
set -e
cd "$(dirname ${0})"
# You should update the version in package.json before running this script
FILE="gradio/package.json"
new_version=$(python -c "import json; f = open('$FILE', 'r'); data = json.load(f); print(data['version']); f.close();")
GRADIO_VERSION=$new_version
rm -rf gradio/templates/frontend
pnpm i --frozen-lockfile --ignore-scripts
GRADIO_VERSION=$new_version pnpm build
aws s3 cp gradio/templates/frontend "s3://gradio/${new_version}/" --recursive --region us-west-2
rm -rf dist/*
rm -rf build/*
python3 -m build

@ -0,0 +1,888 @@
# @gradio/client
## 1.15.4
### Fixes
- [#11432](https://github.com/gradio-app/gradio/pull/11432) [`dd1eee5`](https://github.com/gradio-app/gradio/commit/dd1eee5f9cd3d70773912fd6444d093bdcea321a) - Fix bug where cancelling an event would close the event stream. Thanks @freddyaboulton!
- [#11421](https://github.com/gradio-app/gradio/pull/11421) [`c2acf6e`](https://github.com/gradio-app/gradio/commit/c2acf6e33025fe7bbfe0660c182006651cc95090) - Preserve value in reload mode. Thanks @aliabid94!
## 1.15.3
### Fixes
- [#11387](https://github.com/gradio-app/gradio/pull/11387) [`8245afc`](https://github.com/gradio-app/gradio/commit/8245afc669501e1e5f0d619f452455f68a3b7667) - Define root URL in frontend. Thanks @aliabid94!
## 1.15.2
### Fixes
- [#11325](https://github.com/gradio-app/gradio/pull/11325) [`2b571e1`](https://github.com/gradio-app/gradio/commit/2b571e13afdc8031ce9c1291abf0fc7062340064) - Fix image streaming - wait for ws to open. Thanks @freddyaboulton!
## 1.15.1
### Fixes
- [#11243](https://github.com/gradio-app/gradio/pull/11243) [`35afa21`](https://github.com/gradio-app/gradio/commit/35afa21f0d6647e4fbda711f3f22a2fd54eedaf9) - Only show parameters warning when valid `endpoint_info` exists. Thanks @hannahblair!
## 1.15.0
### Features
- [#11155](https://github.com/gradio-app/gradio/pull/11155) [`30a1d9e`](https://github.com/gradio-app/gradio/commit/30a1d9e2ac3013d9c844b236410010bce97ffaf5) - Improvements to MCP page. Thanks @abidlabs!
- [#11047](https://github.com/gradio-app/gradio/pull/11047) [`6d4b8a7`](https://github.com/gradio-app/gradio/commit/6d4b8a7f10daefc9c79aa224635da23fbaeebb76) - Implement custom i18n. Thanks @hannahblair!
## 1.14.2
### Fixes
- [#11017](https://github.com/gradio-app/gradio/pull/11017) [`734b309`](https://github.com/gradio-app/gradio/commit/734b3099d79647695e635d87726666d4b28d1bcf) - Include HF token in stream requests. Thanks @nostalgebraist!
## 1.14.1
### Features
- [#10890](https://github.com/gradio-app/gradio/pull/10890) [`01b88c7`](https://github.com/gradio-app/gradio/commit/01b88c7fdedb413ba92ef6191967a8aed25e185f) - Improve API error handling in JS Client. Thanks @l2dy!
## 1.14.0
### Features
- [#10834](https://github.com/gradio-app/gradio/pull/10834) [`c05610c`](https://github.com/gradio-app/gradio/commit/c05610c87dd7f9e9fe5d0aed2fe93e40fdd32648) - Add Deep Links. Thanks @freddyaboulton!
## 1.13.1
### Features
- [#10694](https://github.com/gradio-app/gradio/pull/10694) [`16244f3`](https://github.com/gradio-app/gradio/commit/16244f3c1cb1a65ac1f719142f8fab67512fbb25) - Event Listeners in gradio sketch. Thanks @aliabid94!
### Fixes
- [#10719](https://github.com/gradio-app/gradio/pull/10719) [`b710d7c`](https://github.com/gradio-app/gradio/commit/b710d7cf13c1277fd18c7809cad0f707b880ef70) - Fix error display. Thanks @aliabid94!
## 1.13.0
### Features
- [#10500](https://github.com/gradio-app/gradio/pull/10500) [`16d419b`](https://github.com/gradio-app/gradio/commit/16d419b9f1f18ae4507d18a4739eb83ac4f3fae9) - Allow functions that solely update component properties to run in the frontend by setting `js=True`. Thanks @abidlabs!
## 1.12.0
### Features
- [#10492](https://github.com/gradio-app/gradio/pull/10492) [`29880d5`](https://github.com/gradio-app/gradio/commit/29880d51fbe7fbd222b0765a83c95134dc7d0e90) - Allow showing progress updates on arbitrary components. Thanks @abidlabs!
### Fixes
- [#10547](https://github.com/gradio-app/gradio/pull/10547) [`083d68b`](https://github.com/gradio-app/gradio/commit/083d68b223be82a65f18c553df9ae690a8118a49) - quick_fix_client. Thanks @aliabid94!
## 1.11.0
### Features
- [#10433](https://github.com/gradio-app/gradio/pull/10433) [`2e8dc74`](https://github.com/gradio-app/gradio/commit/2e8dc74f751be02f7217f78d241806b42fcdca04) - Allow building multipage Gradio apps. Thanks @aliabid94!
## 1.10.0
### Features
- [#10270](https://github.com/gradio-app/gradio/pull/10270) [`bb11a2a`](https://github.com/gradio-app/gradio/commit/bb11a2a702ca04fde245e7d54d155cbcbde7791e) - [ZeroGPU] Handshake-based postMessage. Thanks @cbensimon!
### Fixes
- [#10332](https://github.com/gradio-app/gradio/pull/10332) [`e742dcc`](https://github.com/gradio-app/gradio/commit/e742dcccb376692c9ddd5a6c251080e7c5936574) - Allow users to add a custom API route. Thanks @aliabid94!
## 1.9.0
### Features
- [#10262](https://github.com/gradio-app/gradio/pull/10262) [`f3bedd4`](https://github.com/gradio-app/gradio/commit/f3bedd4011bdfdecc952eb1275a9dd96af3e8d71) - add gr.Success and update windows contributing. Thanks @not-lain!
- [#10254](https://github.com/gradio-app/gradio/pull/10254) [`da07707`](https://github.com/gradio-app/gradio/commit/da0770748db9ea40194a43c9138ee2c6536b1247) - Add a `settings` link to the footer with i18n options & pwa instructions. Thanks @abidlabs!
## 1.8.0
### Features
- [#9930](https://github.com/gradio-app/gradio/pull/9930) [`eae345e`](https://github.com/gradio-app/gradio/commit/eae345e5fde39aea220b57c6a954cd7d72ff32d5) - Allow settings custom headers in js client. Thanks @elgiano!
- [#9950](https://github.com/gradio-app/gradio/pull/9950) [`fc06fe4`](https://github.com/gradio-app/gradio/commit/fc06fe41f015678a0545f4e5c99f6ae2704f0031) - Add ability to read and write from LocalStorage. Thanks @abidlabs!
## 1.7.1
### Fixes
- [#9814](https://github.com/gradio-app/gradio/pull/9814) [`6505d42`](https://github.com/gradio-app/gradio/commit/6505d4289a3e3d27d9133b1c8af41697fdc1476d) - support gradio apps on spaces served on subpaths. Thanks @pngwn!
## 1.7.0
### Features
- [#9681](https://github.com/gradio-app/gradio/pull/9681) [`2ed2361`](https://github.com/gradio-app/gradio/commit/2ed236187a9aab18e17fc4a8079eddef7dd195a5) - Allow setting title in gr.Info/Warning/Error. Thanks @ABucket!
## 1.6.0
### Features
- [#8843](https://github.com/gradio-app/gradio/pull/8843) [`6f95286`](https://github.com/gradio-app/gradio/commit/6f95286337459efbccb95c9cfac63355669df9ee) - Disable liking user message in chatbot by default but make it configurable
- [#8843](https://github.com/gradio-app/gradio/pull/8843) [`6f95286`](https://github.com/gradio-app/gradio/commit/6f95286337459efbccb95c9cfac63355669df9ee) - Open audio/image input stream only when queue is ready
- [#8843](https://github.com/gradio-app/gradio/pull/8843) [`6f95286`](https://github.com/gradio-app/gradio/commit/6f95286337459efbccb95c9cfac63355669df9ee) - Send Streaming data over Websocket if possible. Also support base64 output format for images.
- [#8843](https://github.com/gradio-app/gradio/pull/8843) [`6f95286`](https://github.com/gradio-app/gradio/commit/6f95286337459efbccb95c9cfac63355669df9ee) - Streaming inputs for 5.0
- [#8843](https://github.com/gradio-app/gradio/pull/8843) [`6f95286`](https://github.com/gradio-app/gradio/commit/6f95286337459efbccb95c9cfac63355669df9ee) - fix SSR apps on spaces
- [#8843](https://github.com/gradio-app/gradio/pull/8843) [`6f95286`](https://github.com/gradio-app/gradio/commit/6f95286337459efbccb95c9cfac63355669df9ee) - Ssr part 2
- [#8843](https://github.com/gradio-app/gradio/pull/8843) [`6f95286`](https://github.com/gradio-app/gradio/commit/6f95286337459efbccb95c9cfac63355669df9ee) - prefix api routes
### Fixes
- [#8843](https://github.com/gradio-app/gradio/pull/8843) [`6f95286`](https://github.com/gradio-app/gradio/commit/6f95286337459efbccb95c9cfac63355669df9ee) - Trigger state change event on iterators
## 1.6.0-beta.4
### Features
- [#9483](https://github.com/gradio-app/gradio/pull/9483) [`8dc7c12`](https://github.com/gradio-app/gradio/commit/8dc7c12389311b60efcde1b9d3e3668a34d2dc00) - Send Streaming data over Websocket if possible. Also support base64 output format for images. Thanks @freddyaboulton!
## 1.6.0-beta.3
### Features
- [#9412](https://github.com/gradio-app/gradio/pull/9412) [`c2c2fd9`](https://github.com/gradio-app/gradio/commit/c2c2fd989348f826566773c07c0e0bda200199ff) - fix SSR apps on spaces. Thanks @pngwn!
## 1.6.0-beta.2
### Features
- [#9323](https://github.com/gradio-app/gradio/pull/9323) [`06babda`](https://github.com/gradio-app/gradio/commit/06babda0395fd3fbd323c1c3cb33704ecfd6deb0) - Disable liking user message in chatbot by default but make it configurable. Thanks @freddyaboulton!
- [#9339](https://github.com/gradio-app/gradio/pull/9339) [`4c8c6f2`](https://github.com/gradio-app/gradio/commit/4c8c6f2fe603081941c5fdc43f48a0632b9f31ad) - Ssr part 2. Thanks @pngwn!
### Fixes
- [#9299](https://github.com/gradio-app/gradio/pull/9299) [`aa35b07`](https://github.com/gradio-app/gradio/commit/aa35b0788e613fdd45446d267513e6f94fa208ea) - Trigger state change event on iterators. Thanks @freddyaboulton!
## 1.6.0-beta.1
### Features
- [#9200](https://github.com/gradio-app/gradio/pull/9200) [`2e179d3`](https://github.com/gradio-app/gradio/commit/2e179d35be6ed60a5a6bfc7303178d63e41781ad) - prefix api routes. Thanks @pngwn!
## 1.6.0-beta.0
### Features
- [#9149](https://github.com/gradio-app/gradio/pull/9149) [`3d7a9b8`](https://github.com/gradio-app/gradio/commit/3d7a9b81f6fef06187eca832471dc1692eb493a0) - Open audio/image input stream only when queue is ready. Thanks @freddyaboulton!
- [#8941](https://github.com/gradio-app/gradio/pull/8941) [`97a7bf6`](https://github.com/gradio-app/gradio/commit/97a7bf66a79179d1b91a3199d68e5c11216ca500) - Streaming inputs for 5.0. Thanks @freddyaboulton!
## 1.5.2
### Fixes
- [#9163](https://github.com/gradio-app/gradio/pull/9163) [`2b6cbf2`](https://github.com/gradio-app/gradio/commit/2b6cbf25908e42cf027324e54ef2cc0baad11a91) - fix exports and generate types. Thanks @pngwn!
## 1.5.1
### Features
- [#9118](https://github.com/gradio-app/gradio/pull/9118) [`e1c404d`](https://github.com/gradio-app/gradio/commit/e1c404da1143fb52b659d03e028bdba1badf443d) - setup npm-previews of all packages. Thanks @pngwn!
## 1.5.0
### Features
- [#8965](https://github.com/gradio-app/gradio/pull/8965) [`d30432e`](https://github.com/gradio-app/gradio/commit/d30432e9c6d4cc1e5cfd989a1a3ae4aba7e21290) - harden CI. Thanks @pngwn!
### Fixes
- [#8847](https://github.com/gradio-app/gradio/pull/8847) [`4d8a473`](https://github.com/gradio-app/gradio/commit/4d8a473632e388a312aee5c705b3c1f79853441b) - fix: wrong named param check for js client. Thanks @freddyaboulton!
## 1.4.0
### Features
- [#8816](https://github.com/gradio-app/gradio/pull/8816) [`9ee6839`](https://github.com/gradio-app/gradio/commit/9ee6839f94d23d685a800ed3a275206e0b0e48f6) - Change optionality of the `data` param in `submit` + `predict`. Thanks @hannahblair!
### Fixes
- [#8820](https://github.com/gradio-app/gradio/pull/8820) [`5050b36`](https://github.com/gradio-app/gradio/commit/5050b36221e75a18d8a5d4f74a725e70768a4c4a) - fix: wrong named param check for js client. Thanks @JacobLinCool!
## 1.3.0
### Fixes
- [#8699](https://github.com/gradio-app/gradio/pull/8699) [`012da05`](https://github.com/gradio-app/gradio/commit/012da05287846d94beb0ecdc28d7fbc48c4248ff) - Ensure JS client `status_callback` functionality works and improve status messages. Thanks @hannahblair!
- [#8505](https://github.com/gradio-app/gradio/pull/8505) [`2943d6d`](https://github.com/gradio-app/gradio/commit/2943d6d68847314885dc6c5c0247083116017ca0) - Add Timer component. Thanks @aliabid94!
- [#8715](https://github.com/gradio-app/gradio/pull/8715) [`a6b3c6c`](https://github.com/gradio-app/gradio/commit/a6b3c6ce4e1d06253860c72740024a9138e3a93a) - Ensure `@gradio/client`'s `submit` iterator releases as expected. Thanks @pngwn!
- [#8716](https://github.com/gradio-app/gradio/pull/8716) [`e834d30`](https://github.com/gradio-app/gradio/commit/e834d302e44f7a54565129bf2c11acf4e882a59b) - ensure `@gradio/client` always returns the correct data. Thanks @pngwn!
- [#8714](https://github.com/gradio-app/gradio/pull/8714) [`1b5b5b0`](https://github.com/gradio-app/gradio/commit/1b5b5b0b43e69ee84f3baad2aae59ffc9c4d995a) - Bind `fetch` and `stream` in JS client. Thanks @hannahblair!
- [#8720](https://github.com/gradio-app/gradio/pull/8720) [`936c713`](https://github.com/gradio-app/gradio/commit/936c7137a99ef59efdf75bae5dd27eea2ac1f577) - Documents auth in the guides, in the view API page, and also types the Blocks.config object. Thanks @abidlabs!
## 1.2.1
### Features
- [#8649](https://github.com/gradio-app/gradio/pull/8649) [`4b6c8b1`](https://github.com/gradio-app/gradio/commit/4b6c8b1c004cee67345a7f103ba2dc8e90b82e6c) - ensure `File` objects are handled in JS client `handle_file`. Thanks @hannahblair!
## 1.2.0
### Features
- [#8489](https://github.com/gradio-app/gradio/pull/8489) [`c2a0d05`](https://github.com/gradio-app/gradio/commit/c2a0d056d679d90631d9ccd944dadd67e7e03b7f) - Control Display of Error, Info, Warning. Thanks @freddyaboulton!
- [#8571](https://github.com/gradio-app/gradio/pull/8571) [`a77877f`](https://github.com/gradio-app/gradio/commit/a77877f62df7c610fcfac7b3b00e186a087c8ec6) - First time loading performance optimization. Thanks @baojianting!
- [#8600](https://github.com/gradio-app/gradio/pull/8600) [`7289c4b`](https://github.com/gradio-app/gradio/commit/7289c4b036d8a78c48f8c9e66ba998e6730e80d2) - Add credentials: include and Cookie header to prevent 401 error. Thanks @yinkiu602!
- [#8522](https://github.com/gradio-app/gradio/pull/8522) [`bdaa678`](https://github.com/gradio-app/gradio/commit/bdaa678d0c0a22250b41104f32e9121f98dc7437) - add handle_file docs. Thanks @pngwn!
### Fixes
- [#8521](https://github.com/gradio-app/gradio/pull/8521) [`900cf25`](https://github.com/gradio-app/gradio/commit/900cf25256a5b0563860097d69aac28b6afbfd8b) - Ensure frontend functions work when they don't return a value. Thanks @pngwn!
- [#8548](https://github.com/gradio-app/gradio/pull/8548) [`7fc0f51`](https://github.com/gradio-app/gradio/commit/7fc0f5149bb8d31f3d01b4151b478070499751ee) - Fix reload mode by implementing `close` on the client. Thanks @freddyaboulton!
## 1.1.1
### Features
- [#8499](https://github.com/gradio-app/gradio/pull/8499) [`c5f6e77`](https://github.com/gradio-app/gradio/commit/c5f6e7722a197d4706419ade14276ddecf3196f8) - Cache break themes on change. Thanks @aliabid94!
## 1.1.0
### Features
- [#8483](https://github.com/gradio-app/gradio/pull/8483) [`e2271e2`](https://github.com/gradio-app/gradio/commit/e2271e207d98074bf39b02ae3c5443b2f097627d) - documentation for @gradio/client. Thanks @pngwn!
- [#8485](https://github.com/gradio-app/gradio/pull/8485) [`f8ebace`](https://github.com/gradio-app/gradio/commit/f8ebaceccef60a112603d290d10072ef4e938a6a) - Ensure all status are reported internally when calling `predict`. Thanks @pngwn!
## 1.0.0
### Highlights
#### Clients 1.0 Launch! ([#8468](https://github.com/gradio-app/gradio/pull/8468) [`7cc0a0c`](https://github.com/gradio-app/gradio/commit/7cc0a0c1abea585c3f50ffb1ff78d2b08ddbdd92))
We're excited to unveil the first major release of the Gradio clients.
We've made it even easier to turn any Gradio application into a production endpoint thanks to the clients' **ergonomic**, **transparent**, and **portable** design.
#### Ergonomic API 💆
**Stream From a Gradio app in 5 lines**
Use the `submit` method to get a job you can iterate over:
```python
from gradio_client import Client
client = Client("gradio/llm_stream")
for result in client.submit("What's the best UI framework in Python?"):
print(result)
```
```ts
import { Client } from "@gradio/client";
const client = await Client.connect("gradio/llm_stream")
const job = client.submit("/predict", {"text": "What's the best UI framework in Python?"})
for await (const msg of job) console.log(msg.data)
```
**Use the same keyword arguments as the app**
```python
from gradio_client import Client
client = Client("http://127.0.0.1:7860/")
result = client.predict(
message="Hello!!",
system_prompt="You are helpful AI.",
tokens=10,
api_name="/chat"
)
print(result)
```
```ts
import { Client } from "@gradio/client";
const client = await Client.connect("http://127.0.0.1:7860/");
const result = await client.predict("/chat", {
message: "Hello!!",
system_prompt: "Hello!!",
tokens: 10,
});
console.log(result.data);
```
**Better Error Messages**
If something goes wrong in the upstream app, the client will raise the same exception as the app provided that `show_error=True` in the original app's `launch()` function, or it's a `gr.Error` exception.
#### Transparent Design 🪟
Anything you can do in the UI, you can do with the client:
* 🔒 Authentication
* 🛑 Job Cancelling
* Access Queue Position and API
* 📕 View the API information
Here's an example showing how to display the queue position of a pending job:
```python
from gradio_client import Client
client = Client("gradio/diffusion_model")
job = client.submit("A cute cat")
while not job.done():
status = job.status()
print(f"Current in position {status.rank} out of {status.queue_size}")
```
#### Portable Design ⛺️
The client can run from pretty much any Python or JavaScript environment (Node, Deno, the browser, Service Workers).
Here's an example using the client from a Flask server using gevent:
```python
from gevent import monkey
monkey.patch_all()
from gradio_client import Client
from flask import Flask, send_file
import time
app = Flask(__name__)
imageclient = Client("gradio/diffusion_model")
@app.route("/gen")
def gen():
result = imageclient.predict(
"A cute cat",
api_name="/predict"
)
return send_file(result)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000)
```
#### 1.0 Migration Guide and Breaking Changes
**Python**
- The `serialize` argument of the `Client` class was removed. Has no effect.
- The `upload_files` argument of the `Client` was removed.
- All filepaths must be wrapped in the `handle_file` method. Example:
```python
from gradio_client import Client, handle_file
client = Client("gradio/image_captioner")
client.predict(handle_file("cute_cat.jpg"))
```
- The `output_dir` argument was removed. It is now specified in the `download_files` argument.
**Javascript**
The client has been redesigned entirely. It was refactored from a function into a class. An instance can now be constructed by awaiting the `connect` method.
```js
const app = await Client.connect("gradio/whisper")
```
The app variable has the same methods as the python class (`submit`, `predict`, `view_api`, `duplicate`).
#### Additional Changes
- [#8243](https://github.com/gradio-app/gradio/pull/8243) - Set orig_name in python client file uploads.
- [#8264](https://github.com/gradio-app/gradio/pull/8264) - Make exceptions in the Client more specific.
- [#8247](https://github.com/gradio-app/gradio/pull/8247) - Fix api recorder.
- [#8276](https://github.com/gradio-app/gradio/pull/8276) - Fix bug where client could not connect to apps that had self signed certificates.
- [#8245](https://github.com/gradio-app/gradio/pull/8245) - Cancel server progress from the python client.
- [#8200](https://github.com/gradio-app/gradio/pull/8200) - Support custom components in gr.load
- [#8182](https://github.com/gradio-app/gradio/pull/8182) - Convert sse calls in client from async to sync.
- [#7732](https://github.com/gradio-app/gradio/pull/7732) - Adds support for kwargs and default arguments in the python client, and improves how parameter information is displayed in the "view API" page.
- [#7888](https://github.com/gradio-app/gradio/pull/7888) - Cache view_api info in server and python client.
- [#7575](https://github.com/gradio-app/gradio/pull/7575) - Files should now be supplied as `file(...)` in the Client, and some fixes to `gr.load()` as well.
- [#8401](https://github.com/gradio-app/gradio/pull/8401) - Add CDN installation to JS docs.
- [#8299](https://github.com/gradio-app/gradio/pull/8299) - Allow JS Client to work with authenticated spaces 🍪.
- [#8408](https://github.com/gradio-app/gradio/pull/8408) - Connect heartbeat if state created in render. Also fix config cleanup bug #8407.
- [#8258](https://github.com/gradio-app/gradio/pull/8258) - Improve URL handling in JS Client.
- [#8322](https://github.com/gradio-app/gradio/pull/8322) - ensure the client correctly handles all binary data.
- [#8296](https://github.com/gradio-app/gradio/pull/8296) - always create a jwt when connecting to a space if a hf_token is present.
- [#8285](https://github.com/gradio-app/gradio/pull/8285) - use the correct query param to pass the jwt to the heartbeat event.
- [#8272](https://github.com/gradio-app/gradio/pull/8272) - ensure client works for private spaces.
- [#8197](https://github.com/gradio-app/gradio/pull/8197) - Add support for passing keyword args to `data` in JS client.
- [#8252](https://github.com/gradio-app/gradio/pull/8252) - Client node fix.
- [#8209](https://github.com/gradio-app/gradio/pull/8209) - Rename `eventSource_Factory` and `fetch_implementation`.
- [#8109](https://github.com/gradio-app/gradio/pull/8109) - Implement JS Client tests.
- [#8211](https://github.com/gradio-app/gradio/pull/8211) - remove redundant event source logic.
- [#8179](https://github.com/gradio-app/gradio/pull/8179) - rework upload to be a class method + pass client into each component.
- [#8181](https://github.com/gradio-app/gradio/pull/8181) - Ensure connectivity to private HF spaces with SSE protocol.
- [#8169](https://github.com/gradio-app/gradio/pull/8169) - Only connect to heartbeat if needed.
- [#8118](https://github.com/gradio-app/gradio/pull/8118) - Add eventsource polyfill for Node.js and browser environments.
- [#7646](https://github.com/gradio-app/gradio/pull/7646) - Refactor JS Client.
- [#7974](https://github.com/gradio-app/gradio/pull/7974) - Fix heartbeat in the js client to be Lite compatible.
- [#7926](https://github.com/gradio-app/gradio/pull/7926) - Fixes streaming event race condition.
Thanks @freddyaboulton!
### Features
- [#8370](https://github.com/gradio-app/gradio/pull/8370) [`48eeea4`](https://github.com/gradio-app/gradio/commit/48eeea4eaab7e24168688e3c3fbafb30e4e78d51) - Refactor Cancelling Logic To Use /cancel. Thanks @freddyaboulton!
### Fixes
- [#8477](https://github.com/gradio-app/gradio/pull/8477) [`d5a9604`](https://github.com/gradio-app/gradio/commit/d5a960493017a4890685af61d78ce7d3b3b12e6b) - Fix js client bundle. Thanks @pngwn!
- [#8451](https://github.com/gradio-app/gradio/pull/8451) [`9d2d605`](https://github.com/gradio-app/gradio/commit/9d2d6051caed5c8749a26a6fa7480a5ae6e6c4f3) - Change client submit API to be an AsyncIterable and support more platforms. Thanks @pngwn!
- [#8462](https://github.com/gradio-app/gradio/pull/8462) [`6447dfa`](https://github.com/gradio-app/gradio/commit/6447dface4d46db1c69460e8325a1928d0476a46) - Improve file handling in JS Client. Thanks @hannahblair!
- [#8439](https://github.com/gradio-app/gradio/pull/8439) [`63d36fb`](https://github.com/gradio-app/gradio/commit/63d36fbbf4bf6dc909be9a0ffc7b6bf6621d83e8) - Handle gradio apps using `state` in the JS Client. Thanks @hannahblair!
## 0.20.1
### Features
- [#8415](https://github.com/gradio-app/gradio/pull/8415) [`227de35`](https://github.com/gradio-app/gradio/commit/227de352982b3dcdf9384eaa28b7e9cf09afb6e8) - Fix spaces load error. Thanks @aliabid94!
## 0.20.0
### Features
- [#8401](https://github.com/gradio-app/gradio/pull/8401) [`d078621`](https://github.com/gradio-app/gradio/commit/d078621928136c09ca902d2f37594ed887c67d2e) - Add CDN installation to JS docs. Thanks @hannahblair!
- [#8243](https://github.com/gradio-app/gradio/pull/8243) [`55f664f`](https://github.com/gradio-app/gradio/commit/55f664f2979a49acc29a73cde16c6ebdfcc91db2) - Add event listener support to render blocks. Thanks @aliabid94!
- [#8398](https://github.com/gradio-app/gradio/pull/8398) [`945ac83`](https://github.com/gradio-app/gradio/commit/945ac837e779b120790814ea6f6f81bd2712f5f8) - Improve rendering. Thanks @aliabid94!
- [#8299](https://github.com/gradio-app/gradio/pull/8299) [`ab65360`](https://github.com/gradio-app/gradio/commit/ab653608045ff9462db7ad9fe63e1c60bf20e773) - Allow JS Client to work with authenticated spaces 🍪. Thanks @hannahblair!
### Fixes
- [#8408](https://github.com/gradio-app/gradio/pull/8408) [`e86dd01`](https://github.com/gradio-app/gradio/commit/e86dd01b6e8f7bab3d3c25b84f2ad33129138af4) - Connect heartbeat if state created in render. Also fix config cleanup bug #8407. Thanks @freddyaboulton!
- [#8258](https://github.com/gradio-app/gradio/pull/8258) [`1f8e5c4`](https://github.com/gradio-app/gradio/commit/1f8e5c44e054b943052d8f24d044696ddfd01a54) - Improve URL handling in JS Client. Thanks @hannahblair!
## 0.19.4
### Fixes
- [#8322](https://github.com/gradio-app/gradio/pull/8322) [`47012a0`](https://github.com/gradio-app/gradio/commit/47012a0c4e3e8a80fcae620aaf08b16ceb343cde) - ensure the client correctly handles all binary data. Thanks @Saghen!
## 0.19.3
### Features
- [#8229](https://github.com/gradio-app/gradio/pull/8229) [`7c81897`](https://github.com/gradio-app/gradio/commit/7c81897076ddcd0bb05e0e4ffec35bb9a986d330) - chore(deps): update dependency esbuild to ^0.21.0. Thanks @renovate!
### Fixes
- [#8296](https://github.com/gradio-app/gradio/pull/8296) [`929d216`](https://github.com/gradio-app/gradio/commit/929d216d49aa05614bc83f0761cf7b1cd803d8fe) - always create a jwt when connecting to a space if a hf_token is present. Thanks @pngwn!
## 0.19.2
### Fixes
- [#8285](https://github.com/gradio-app/gradio/pull/8285) [`7d9d8ea`](https://github.com/gradio-app/gradio/commit/7d9d8eab50d36cbecbb84c6a0f3cc1bca7215604) - use the correct query param to pass the jwt to the heartbeat event. Thanks @pngwn!
## 0.19.1
### Fixes
- [#8272](https://github.com/gradio-app/gradio/pull/8272) [`fbf4edd`](https://github.com/gradio-app/gradio/commit/fbf4edde7c896cdf4c903463e44c31ed96111b3c) - ensure client works for private spaces. Thanks @pngwn!
## 0.19.0
### Features
- [#8110](https://github.com/gradio-app/gradio/pull/8110) [`5436031`](https://github.com/gradio-app/gradio/commit/5436031f92c1596282eb64e1e74d555f279e9697) - Render decorator 2. Thanks @aliabid94!
- [#8197](https://github.com/gradio-app/gradio/pull/8197) [`e09b4e8`](https://github.com/gradio-app/gradio/commit/e09b4e8216b970bc1b142a0f08e7d190b954eb35) - Add support for passing keyword args to `data` in JS client. Thanks @hannahblair!
### Fixes
- [#8252](https://github.com/gradio-app/gradio/pull/8252) [`22df61a`](https://github.com/gradio-app/gradio/commit/22df61a26adf8023f6dd49c051979990e8d3879a) - Client node fix. Thanks @pngwn!
## 0.18.0
### Features
- [#8121](https://github.com/gradio-app/gradio/pull/8121) [`f5b710c`](https://github.com/gradio-app/gradio/commit/f5b710c919b0ce604ea955f0d5f4faa91095ca4a) - chore(deps): update dependency eslint to v9. Thanks @renovate!
- [#8209](https://github.com/gradio-app/gradio/pull/8209) [`b9afe93`](https://github.com/gradio-app/gradio/commit/b9afe93915401df5bd6737c89395c2477acfa585) - Rename `eventSource_Factory` and `fetch_implementation`. Thanks @hannahblair!
- [#8109](https://github.com/gradio-app/gradio/pull/8109) [`bed2f82`](https://github.com/gradio-app/gradio/commit/bed2f82e2297b50f7b59423a3de05af0b9910724) - Implement JS Client tests. Thanks @hannahblair!
- [#8211](https://github.com/gradio-app/gradio/pull/8211) [`91b5cd6`](https://github.com/gradio-app/gradio/commit/91b5cd6132fb8903c92f70fce0800324836a1fc3) - remove redundant event source logic. Thanks @hannahblair!
### Fixes
- [#8179](https://github.com/gradio-app/gradio/pull/8179) [`6a218b4`](https://github.com/gradio-app/gradio/commit/6a218b4148095aaa0c58d8c20973ba01c8764fc2) - rework upload to be a class method + pass client into each component. Thanks @pngwn!
- [#8181](https://github.com/gradio-app/gradio/pull/8181) [`cf52ca6`](https://github.com/gradio-app/gradio/commit/cf52ca6a51320ece97f009a177792840b5fbc785) - Ensure connectivity to private HF spaces with SSE protocol. Thanks @hannahblair!
- [#8169](https://github.com/gradio-app/gradio/pull/8169) [`3a6f1a5`](https://github.com/gradio-app/gradio/commit/3a6f1a50b263e0a733f609a08019fc4d05480e1a) - Only connect to heartbeat if needed. Thanks @freddyaboulton!
- [#8118](https://github.com/gradio-app/gradio/pull/8118) [`7aca673`](https://github.com/gradio-app/gradio/commit/7aca673b38a087533524b2fd8dd3a03e0e4bacfe) - Add eventsource polyfill for Node.js and browser environments. Thanks @hannahblair!
## 0.17.0
### Highlights
#### Setting File Upload Limits ([#7909](https://github.com/gradio-app/gradio/pull/7909) [`2afca65`](https://github.com/gradio-app/gradio/commit/2afca6541912b37dc84f447c7ad4af21607d7c72))
We have added a `max_file_size` parameter to `launch()` that limits the size of files uploaded to the server. This limit applies to each individual file. This parameter can be specified as a string or an integer (corresponding to the size in bytes).
The following code snippet sets a max file size of 5 megabytes.
```python
import gradio as gr
demo = gr.Interface(lambda x: x, "image", "image")
demo.launch(max_file_size="5mb")
# or
demo.launch(max_file_size=5 * gr.FileSize.MB)
```
![max_file_size_upload](https://github.com/gradio-app/gradio/assets/41651716/7547330c-a082-4901-a291-3f150a197e45)
#### Error states can now be cleared
When a component encounters an error, the error state shown in the UI can now be cleared by clicking on the `x` icon in the top right of the component. This applies to all types of errors, whether it's raised in the UI or the server.
![error_modal_calculator](https://github.com/gradio-app/gradio/assets/41651716/16cb071c-accd-45a6-9c18-0dea27d4bd98)
Thanks @freddyaboulton!
### Features
- [#8056](https://github.com/gradio-app/gradio/pull/8056) [`2e469a5`](https://github.com/gradio-app/gradio/commit/2e469a5f99e52a5011a010f46e47dde7bb0c7140) - Using keys to preserve values between reloads. Thanks @aliabid94!
- [#7646](https://github.com/gradio-app/gradio/pull/7646) [`450b8cc`](https://github.com/gradio-app/gradio/commit/450b8cc898f130f15caa3742f65c17b9f7a8f398) - Refactor JS Client. Thanks @hannahblair!
- [#8061](https://github.com/gradio-app/gradio/pull/8061) [`17e83c9`](https://github.com/gradio-app/gradio/commit/17e83c958ebb35b3e122ca486067d1bd5ce33a22) - Docs Reorg and Intro Page. Thanks @aliabd!
### Fixes
- [#8066](https://github.com/gradio-app/gradio/pull/8066) [`624f9b9`](https://github.com/gradio-app/gradio/commit/624f9b9477f74a581a6c14119234f9efdfcda398) - make gradio dev tools a local dependency rather than bundling. Thanks @pngwn!
## 0.16.0
### Features
- [#7845](https://github.com/gradio-app/gradio/pull/7845) [`dbb7373`](https://github.com/gradio-app/gradio/commit/dbb7373dde69d4ed2741942b5a1898f8620cec24) - ensure `ImageEditor` events work as expected. Thanks @pngwn!
### Fixes
- [#7974](https://github.com/gradio-app/gradio/pull/7974) [`79e0aa8`](https://github.com/gradio-app/gradio/commit/79e0aa81c94e755faa6e85d76ac5d5a666313e6a) - Fix heartbeat in the js client to be Lite compatible. Thanks @whitphx!
## 0.15.1
### Fixes
- [#7926](https://github.com/gradio-app/gradio/pull/7926) [`9666854`](https://github.com/gradio-app/gradio/commit/966685479078f59430b3bced7e6068eb8157c003) - Fixes streaming event race condition. Thanks @aliabid94!
## 0.15.0
### Highlights
#### Automatically delete state after user has disconnected from the webpage ([#7829](https://github.com/gradio-app/gradio/pull/7829) [`6a4bf7a`](https://github.com/gradio-app/gradio/commit/6a4bf7abe29059dbdc6a342e0366fdaa2e4120ee))
Gradio now automatically deletes `gr.State` variables stored in the server's RAM when users close their browser tab.
The deletion will happen 60 minutes after the server detected a disconnect from the user's browser.
If the user connects again in that timeframe, their state will not be deleted.
Additionally, Gradio now includes a `Blocks.unload()` event, allowing you to run arbitrary cleanup functions when users disconnect (this does not have a 60 minute delay).
You can think of the `unload` event as the opposite of the `load` event.
```python
with gr.Blocks() as demo:
gr.Markdown(
"""# State Cleanup Demo
🖼️ Images are saved in a user-specific directory and deleted when the user closes the page via demo.unload.
""")
with gr.Row():
with gr.Column(scale=1):
with gr.Row():
img = gr.Image(label="Generated Image", height=300, width=300)
with gr.Row():
gen = gr.Button(value="Generate")
with gr.Row():
history = gr.Gallery(label="Previous Generations", height=500, columns=10)
state = gr.State(value=[], delete_callback=lambda v: print("STATE DELETED"))
demo.load(generate_random_img, [state], [img, state, history])
gen.click(generate_random_img, [state], [img, state, history])
demo.unload(delete_directory)
demo.launch(auth=lambda user,pwd: True,
auth_message="Enter any username and password to continue")
```
Thanks @freddyaboulton!
## 0.14.0
### Features
- [#7691](https://github.com/gradio-app/gradio/pull/7691) [`84f81fe`](https://github.com/gradio-app/gradio/commit/84f81fec9287b041203a141bbf2852720f7d199c) - Closing stream from the backend. Thanks @aliabid94!
### Fixes
- [#7564](https://github.com/gradio-app/gradio/pull/7564) [`5d1e8da`](https://github.com/gradio-app/gradio/commit/5d1e8dae5ac23f605c3b5f41dbe18751dff380a0) - batch UI updates on a per frame basis. Thanks @pngwn!
## 0.13.0
### Fixes
- [#7575](https://github.com/gradio-app/gradio/pull/7575) [`d0688b3`](https://github.com/gradio-app/gradio/commit/d0688b3c25feabb4fc7dfa0ab86086b3af7eb337) - Files should now be supplied as `file(...)` in the Client, and some fixes to `gr.load()` as well. Thanks @abidlabs!
## 0.12.2
### Features
- [#7528](https://github.com/gradio-app/gradio/pull/7528) [`eda33b3`](https://github.com/gradio-app/gradio/commit/eda33b3763897a542acf298e523fa493dc655aee) - Refactors `get_fetchable_url_or_file()` to remove it from the frontend. Thanks [@abidlabs](https://github.com/abidlabs)!
- [#7340](https://github.com/gradio-app/gradio/pull/7340) [`4b0d589`](https://github.com/gradio-app/gradio/commit/4b0d58933057432758a54169a360eb352903d6b4) - chore(deps): update all non-major dependencies. Thanks [@renovate](https://github.com/apps/renovate)!
## 0.12.1
### Fixes
- [#7411](https://github.com/gradio-app/gradio/pull/7411) [`32b317f`](https://github.com/gradio-app/gradio/commit/32b317f24e3d43f26684bb9f3964f31efd0ea556) - Set `root` correctly for Gradio apps that are deployed behind reverse proxies. Thanks [@abidlabs](https://github.com/abidlabs)!
## 0.12.0
### Features
- [#7183](https://github.com/gradio-app/gradio/pull/7183) [`49d9c48`](https://github.com/gradio-app/gradio/commit/49d9c48537aa706bf72628e3640389470138bdc6) - [WIP] Refactor file normalization to be in the backend and remove it from the frontend of each component. Thanks [@abidlabs](https://github.com/abidlabs)!
## 0.11.0
### Features
- [#7102](https://github.com/gradio-app/gradio/pull/7102) [`68a54a7`](https://github.com/gradio-app/gradio/commit/68a54a7a310d8d7072fdae930bf1cfdf12c45a7f) - Improve chatbot streaming performance with diffs. Thanks [@aliabid94](https://github.com/aliabid94)! Note that this PR changes the API format for generator functions, which would be a breaking change for any clients reading the EventStream directly.
## 0.10.1
### Fixes
- [#7055](https://github.com/gradio-app/gradio/pull/7055) [`3c3cf86`](https://github.com/gradio-app/gradio/commit/3c3cf8618a8cad1ef66a7f96664923d2c9f5e0e2) - Fix UI freeze on rapid generators. Thanks [@aliabid94](https://github.com/aliabid94)!
## 0.10.0
### Features
- [#6931](https://github.com/gradio-app/gradio/pull/6931) [`6c863af`](https://github.com/gradio-app/gradio/commit/6c863af92fa9ceb5c638857eb22cc5ddb718d549) - Fix functional tests. Thanks [@aliabid94](https://github.com/aliabid94)!
- [#6820](https://github.com/gradio-app/gradio/pull/6820) [`649cd4d`](https://github.com/gradio-app/gradio/commit/649cd4d68041d11fcbe31f8efa455345ac49fc74) - Use `EventSource_factory` in `open_stream()` for Wasm. Thanks [@whitphx](https://github.com/whitphx)!
## 0.9.4
### Fixes
- [#6863](https://github.com/gradio-app/gradio/pull/6863) [`d406855`](https://github.com/gradio-app/gradio/commit/d4068557953746662235d595ec435c42ceb24414) - Fix JS Client when app is running behind a proxy. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
## 0.9.3
### Features
- [#6814](https://github.com/gradio-app/gradio/pull/6814) [`828fb9e`](https://github.com/gradio-app/gradio/commit/828fb9e6ce15b6ea08318675a2361117596a1b5d) - Refactor queue so that there are separate queues for each concurrency id. Thanks [@aliabid94](https://github.com/aliabid94)!
## 0.9.2
### Features
- [#6798](https://github.com/gradio-app/gradio/pull/6798) [`245d58e`](https://github.com/gradio-app/gradio/commit/245d58eff788e8d44a59d37a2d9b26d0f08a62b4) - Improve how server/js client handle unexpected errors. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
## 0.9.1
### Fixes
- [#6693](https://github.com/gradio-app/gradio/pull/6693) [`34f9431`](https://github.com/gradio-app/gradio/commit/34f943101bf7dd6b8a8974a6131c1ed7c4a0dac0) - Python client properly handles heartbeat and log messages. Also handles responses longer than 65k. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
## 0.9.0
### Features
- [#6398](https://github.com/gradio-app/gradio/pull/6398) [`67ddd40`](https://github.com/gradio-app/gradio/commit/67ddd40b4b70d3a37cb1637c33620f8d197dbee0) - Lite v4. Thanks [@whitphx](https://github.com/whitphx)!
### Fixes
- [#6556](https://github.com/gradio-app/gradio/pull/6556) [`d76bcaa`](https://github.com/gradio-app/gradio/commit/d76bcaaaf0734aaf49a680f94ea9d4d22a602e70) - Fix api event drops. Thanks [@aliabid94](https://github.com/aliabid94)!
## 0.8.2
### Features
- [#6511](https://github.com/gradio-app/gradio/pull/6511) [`71f1a1f99`](https://github.com/gradio-app/gradio/commit/71f1a1f9931489d465c2c1302a5c8d768a3cd23a) - Mark `FileData.orig_name` optional on the frontend aligning the type definition on the Python side. Thanks [@whitphx](https://github.com/whitphx)!
## 0.8.1
### Fixes
- [#6383](https://github.com/gradio-app/gradio/pull/6383) [`324867f63`](https://github.com/gradio-app/gradio/commit/324867f63c920113d89a565892aa596cf8b1e486) - Fix event target. Thanks [@aliabid94](https://github.com/aliabid94)!
## 0.8.0
### Features
- [#6307](https://github.com/gradio-app/gradio/pull/6307) [`f1409f95e`](https://github.com/gradio-app/gradio/commit/f1409f95ed39c5565bed6a601e41f94e30196a57) - Provide status updates on file uploads. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
## 0.7.2
### Fixes
- [#6327](https://github.com/gradio-app/gradio/pull/6327) [`bca6c2c80`](https://github.com/gradio-app/gradio/commit/bca6c2c80f7e5062427019de45c282238388af95) - Restore query parameters in request. Thanks [@aliabid94](https://github.com/aliabid94)!
## 0.7.1
### Features
- [#6137](https://github.com/gradio-app/gradio/pull/6137) [`2ba14b284`](https://github.com/gradio-app/gradio/commit/2ba14b284f908aa13859f4337167a157075a68eb) - JS Param. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)!
## 0.7.0
### Features
- [#5498](https://github.com/gradio-app/gradio/pull/5498) [`287fe6782`](https://github.com/gradio-app/gradio/commit/287fe6782825479513e79a5cf0ba0fbfe51443d7) - fix circular dependency with client + upload. Thanks [@pngwn](https://github.com/pngwn)!
- [#5498](https://github.com/gradio-app/gradio/pull/5498) [`287fe6782`](https://github.com/gradio-app/gradio/commit/287fe6782825479513e79a5cf0ba0fbfe51443d7) - Image v4. Thanks [@pngwn](https://github.com/pngwn)!
- [#5498](https://github.com/gradio-app/gradio/pull/5498) [`287fe6782`](https://github.com/gradio-app/gradio/commit/287fe6782825479513e79a5cf0ba0fbfe51443d7) - Swap websockets for SSE. Thanks [@pngwn](https://github.com/pngwn)!
## 0.7.0-beta.1
### Features
- [#6143](https://github.com/gradio-app/gradio/pull/6143) [`e4f7b4b40`](https://github.com/gradio-app/gradio/commit/e4f7b4b409323b01aa01b39e15ce6139e29aa073) - fix circular dependency with client + upload. Thanks [@pngwn](https://github.com/pngwn)!
- [#6094](https://github.com/gradio-app/gradio/pull/6094) [`c476bd5a5`](https://github.com/gradio-app/gradio/commit/c476bd5a5b70836163b9c69bf4bfe068b17fbe13) - Image v4. Thanks [@pngwn](https://github.com/pngwn)!
- [#6069](https://github.com/gradio-app/gradio/pull/6069) [`bf127e124`](https://github.com/gradio-app/gradio/commit/bf127e1241a41401e144874ea468dff8474eb505) - Swap websockets for SSE. Thanks [@aliabid94](https://github.com/aliabid94)!
## 0.7.0-beta.0
### Features
- [#6016](https://github.com/gradio-app/gradio/pull/6016) [`83e947676`](https://github.com/gradio-app/gradio/commit/83e947676d327ca2ab6ae2a2d710c78961c771a0) - Format js in v4 branch. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
### Fixes
- [#6046](https://github.com/gradio-app/gradio/pull/6046) [`dbb7de5e0`](https://github.com/gradio-app/gradio/commit/dbb7de5e02c53fee05889d696d764d212cb96c74) - fix tests. Thanks [@pngwn](https://github.com/pngwn)!
## 0.6.0
### Features
- [#5972](https://github.com/gradio-app/gradio/pull/5972) [`11a300791`](https://github.com/gradio-app/gradio/commit/11a3007916071f0791844b0a37f0fb4cec69cea3) - Lite: Support opening the entrypoint HTML page directly in browser via the `file:` protocol. Thanks [@whitphx](https://github.com/whitphx)!
## 0.5.2
### Fixes
- [#5840](https://github.com/gradio-app/gradio/pull/5840) [`4e62b8493`](https://github.com/gradio-app/gradio/commit/4e62b8493dfce50bafafe49f1a5deb929d822103) - Ensure websocket polyfill doesn't load if there is already a `global.WebSocket` property set. Thanks [@Jay2theWhy](https://github.com/Jay2theWhy)!
## 0.5.1
### Fixes
- [#5816](https://github.com/gradio-app/gradio/pull/5816) [`796145e2c`](https://github.com/gradio-app/gradio/commit/796145e2c48c4087bec17f8ec0be4ceee47170cb) - Fix calls to the component server so that `gr.FileExplorer` works on Spaces. Thanks [@abidlabs](https://github.com/abidlabs)!
## 0.5.0
### Highlights
#### new `FileExplorer` component ([#5672](https://github.com/gradio-app/gradio/pull/5672) [`e4a307ed6`](https://github.com/gradio-app/gradio/commit/e4a307ed6cde3bbdf4ff2f17655739addeec941e))
Thanks to a new capability that allows components to communicate directly with the server _without_ passing data via the value, we have created a new `FileExplorer` component.
This component allows you to populate the explorer by passing a glob, but only provides the selected file(s) in your prediction function.
Users can then navigate the virtual filesystem and select files which will be accessible in your predict function. This component will allow developers to build more complex spaces, with more flexible input options.
![output](https://github.com/pngwn/MDsveX/assets/12937446/ef108f0b-0e84-4292-9984-9dc66b3e144d)
For more information check the [`FileExplorer` documentation](https://gradio.app/docs/fileexplorer).
Thanks [@aliabid94](https://github.com/aliabid94)!
### Features
- [#5787](https://github.com/gradio-app/gradio/pull/5787) [`caeee8bf7`](https://github.com/gradio-app/gradio/commit/caeee8bf7821fd5fe2f936ed82483bed00f613ec) - ensure the client does not depend on `window` when running in a node environment. Thanks [@gibiee](https://github.com/gibiee)!
### Fixes
- [#5776](https://github.com/gradio-app/gradio/pull/5776) [`c0fef4454`](https://github.com/gradio-app/gradio/commit/c0fef44541bfa61568bdcfcdfc7d7d79869ab1df) - Revert replica proxy logic and instead implement using the `root` variable. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
## 0.4.2
### Features
- [#5124](https://github.com/gradio-app/gradio/pull/5124) [`6e56a0d9b`](https://github.com/gradio-app/gradio/commit/6e56a0d9b0c863e76c69e1183d9d40196922b4cd) - Lite: Websocket queueing. Thanks [@whitphx](https://github.com/whitphx)!
## 0.4.1
### Fixes
- [#5705](https://github.com/gradio-app/gradio/pull/5705) [`78e7cf516`](https://github.com/gradio-app/gradio/commit/78e7cf5163e8d205e8999428fce4c02dbdece25f) - ensure internal data has updated before dispatching `success` or `then` events. Thanks [@pngwn](https://github.com/pngwn)!
## 0.4.0
### Features
- [#5682](https://github.com/gradio-app/gradio/pull/5682) [`c57f1b75e`](https://github.com/gradio-app/gradio/commit/c57f1b75e272c76b0af4d6bd0c7f44743ff34f26) - Fix functional tests. Thanks [@abidlabs](https://github.com/abidlabs)!
- [#5681](https://github.com/gradio-app/gradio/pull/5681) [`40de3d217`](https://github.com/gradio-app/gradio/commit/40de3d2178b61ebe424b6f6228f94c0c6f679bea) - add query parameters to the `gr.Request` object through the `query_params` attribute. Thanks [@DarhkVoyd](https://github.com/DarhkVoyd)!
- [#5653](https://github.com/gradio-app/gradio/pull/5653) [`ea0e00b20`](https://github.com/gradio-app/gradio/commit/ea0e00b207b4b90a10e9d054c4202d4e705a29ba) - Prevent Clients from accessing API endpoints that set `api_name=False`. Thanks [@abidlabs](https://github.com/abidlabs)!
## 0.3.1
### Fixes
- [#5412](https://github.com/gradio-app/gradio/pull/5412) [`26fef8c7`](https://github.com/gradio-app/gradio/commit/26fef8c7f85a006c7e25cdbed1792df19c512d02) - Skip view_api request in js client when auth enabled. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
## 0.3.0
### Features
- [#5267](https://github.com/gradio-app/gradio/pull/5267) [`119c8343`](https://github.com/gradio-app/gradio/commit/119c834331bfae60d4742c8f20e9cdecdd67e8c2) - Faster reload mode. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
## 0.2.1
### Features
- [#5173](https://github.com/gradio-app/gradio/pull/5173) [`730f0c1d`](https://github.com/gradio-app/gradio/commit/730f0c1d54792eb11359e40c9f2326e8a6e39203) - Ensure gradio client works as expected for functions that return nothing. Thanks [@raymondtri](https://github.com/raymondtri)!
## 0.2.0
### Features
- [#5133](https://github.com/gradio-app/gradio/pull/5133) [`61129052`](https://github.com/gradio-app/gradio/commit/61129052ed1391a75c825c891d57fa0ad6c09fc8) - Update dependency esbuild to ^0.19.0. Thanks [@renovate](https://github.com/apps/renovate)!
- [#5035](https://github.com/gradio-app/gradio/pull/5035) [`8b4eb8ca`](https://github.com/gradio-app/gradio/commit/8b4eb8cac9ea07bde31b44e2006ca2b7b5f4de36) - JS Client: Fixes cannot read properties of null (reading 'is_file'). Thanks [@raymondtri](https://github.com/raymondtri)!
### Fixes
- [#5075](https://github.com/gradio-app/gradio/pull/5075) [`67265a58`](https://github.com/gradio-app/gradio/commit/67265a58027ef1f9e4c0eb849a532f72eaebde48) - Allow supporting >1000 files in `gr.File()` and `gr.UploadButton()`. Thanks [@abidlabs](https://github.com/abidlabs)!
## 0.1.4
### Patch Changes
- [#4717](https://github.com/gradio-app/gradio/pull/4717) [`ab5d1ea0`](https://github.com/gradio-app/gradio/commit/ab5d1ea0de87ed888779b66fd2a705583bd29e02) Thanks [@whitphx](https://github.com/whitphx)! - Fix the package description
## 0.1.3
### Patch Changes
- [#4357](https://github.com/gradio-app/gradio/pull/4357) [`0dbd8f7f`](https://github.com/gradio-app/gradio/commit/0dbd8f7fee4b4877f783fa7bc493f98bbfc3d01d) Thanks [@pngwn](https://github.com/pngwn)! - Various internal refactors and cleanups.
## 0.1.2
### Patch Changes
- [#4273](https://github.com/gradio-app/gradio/pull/4273) [`1d0f0a9d`](https://github.com/gradio-app/gradio/commit/1d0f0a9db096552e67eb2197c932342587e9e61e) Thanks [@pngwn](https://github.com/pngwn)! - Ensure websocket error messages are correctly handled.
- [#4315](https://github.com/gradio-app/gradio/pull/4315) [`b525b122`](https://github.com/gradio-app/gradio/commit/b525b122dd8569bbaf7e06db5b90d622d2e9073d) Thanks [@whitphx](https://github.com/whitphx)! - Refactor types.
- [#4271](https://github.com/gradio-app/gradio/pull/4271) [`1151c525`](https://github.com/gradio-app/gradio/commit/1151c5253554cb87ebd4a44a8a470ac215ff782b) Thanks [@pngwn](https://github.com/pngwn)! - Ensure the full root path is always respected when making requests to a gradio app server.
## 0.1.1
### Patch Changes
- [#4201](https://github.com/gradio-app/gradio/pull/4201) [`da5b4ee1`](https://github.com/gradio-app/gradio/commit/da5b4ee11721175858ded96e5710225369097f74) Thanks [@pngwn](https://github.com/pngwn)! - Ensure semiver is bundled so CDN links work correctly.
- [#4202](https://github.com/gradio-app/gradio/pull/4202) [`a26e9afd`](https://github.com/gradio-app/gradio/commit/a26e9afde319382993e6ddc77cc4e56337a31248) Thanks [@pngwn](https://github.com/pngwn)! - Ensure all URLs returned by the client are complete URLs with the correct host instead of an absolute path relative to a server.
## 0.1.0
### Minor Changes
- [#4185](https://github.com/gradio-app/gradio/pull/4185) [`67239ca9`](https://github.com/gradio-app/gradio/commit/67239ca9b2fe3796853fbf7bf865c9e4b383200d) Thanks [@pngwn](https://github.com/pngwn)! - Update client for initial release
### Patch Changes
- [#3692](https://github.com/gradio-app/gradio/pull/3692) [`48e8b113`](https://github.com/gradio-app/gradio/commit/48e8b113f4b55e461d9da4f153bf72aeb4adf0f1) Thanks [@pngwn](https://github.com/pngwn)! - Ensure client works in node, create ESM bundle and generate typescript declaration files.
- [#3605](https://github.com/gradio-app/gradio/pull/3605) [`ae4277a9`](https://github.com/gradio-app/gradio/commit/ae4277a9a83d49bdadfe523b0739ba988128e73b) Thanks [@pngwn](https://github.com/pngwn)! - Update readme.

View File

@ -0,0 +1,448 @@
## JavaScript Client Library
Interact with Gradio APIs using our JavaScript (and TypeScript) client.
## Installation
The Gradio JavaScript Client is available on npm as `@gradio/client`. You can install it as below:
```shell
npm i @gradio/client
```
Or, you can include it directly in your HTML via the jsDelivr CDN:
```html
<script src="https://cdn.jsdelivr.net/npm/@gradio/client/dist/index.min.js"></script>
```
## Usage
The JavaScript Gradio Client exposes the `Client` class along with various other utility functions. `Client` is used to initialise and establish a connection to, or duplicate, a Gradio app.
### `Client`
The `Client.connect` method connects to the API of a hosted Gradio space and returns an object that allows you to make calls to that API.
The simplest example looks like this:
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name");
const result = await app.predict("/predict");
```
This function accepts two arguments: `source` and `options`:
#### `source`
This is the URL or name of the Gradio app whose API you wish to connect to. This parameter is required and should always be a string. For example:
```ts
Client.connect("user/space-name");
```
#### `options`
The options object can optionally be passed as a second parameter. This object has two properties, `hf_token` and `status_callback`.
##### `hf_token`
This should be a Hugging Face personal access token and is required if you wish to make calls to a private gradio api. This option is optional and should be a string starting with `"hf_"`.
Example:
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name", { hf_token: "hf_..." });
```
##### `status_callback`
This should be a function which will notify you of the status of a space if it is not running. If the gradio API you are connecting to is awake and running, or is not hosted on a Hugging Face Space, then this function will do nothing.
**Additional context**
Applications hosted on Hugging Face spaces can be in a number of different states. As spaces are a GitOps tool and will rebuild when new changes are pushed to the repository, they have various building, running and error states. If a space is not 'running' then the function passed as the `status_callback` will notify you of the current state of the space and the status of the space as it changes. Spaces that are building or sleeping can take longer than usual to respond, so you can use this information to give users feedback about the progress of their action.
```ts
import { Client, type SpaceStatus } from "@gradio/client";
const app = await Client.connect("user/space-name", {
// The space_status parameter does not need to be manually annotated, this is just for illustration.
status_callback: (space_status: SpaceStatus) => console.log(space_status)
});
```
```ts
interface SpaceStatusNormal {
status: "sleeping" | "running" | "building" | "error" | "stopped";
detail:
| "SLEEPING"
| "RUNNING"
| "RUNNING_BUILDING"
| "BUILDING"
| "NOT_FOUND";
load_status: "pending" | "error" | "complete" | "generating";
message: string;
}
interface SpaceStatusError {
status: "space_error";
detail: "NO_APP_FILE" | "CONFIG_ERROR" | "BUILD_ERROR" | "RUNTIME_ERROR";
load_status: "error";
message: string;
discussions_enabled: boolean;
}
type SpaceStatus = SpaceStatusNormal | SpaceStatusError;
```
The gradio client returns an object with a number of methods and properties:
#### `predict`
The `predict` method allows you to call an api endpoint and get a prediction result:
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name");
const result = await app.predict("/predict");
```
`predict` accepts two parameters, `endpoint` and `payload`. It returns a promise that resolves to the prediction result.
##### `endpoint`
This is the endpoint for an api request and is required. The default endpoint for a `gradio.Interface` is `"/predict"`. Explicitly named endpoints have a custom name. The endpoint names can be found on the "View API" page of a space.
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name");
const result = await app.predict("/predict");
```
##### `payload`
The `payload` argument is generally required but this depends on the API itself. If the API endpoint depends on values being passed in then the argument is required for the API request to succeed. The data that should be passed in is detailed on the "View API" page of a space, or accessible via the `view_api()` method of the client.
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name");
const result = await app.predict("/predict", {
input: 1,
word_1: "Hello",
word_2: "friends"
});
```
#### `submit`
The `submit` method provides a more flexible way to call an API endpoint, providing you with status updates about the current progress of the prediction as well as supporting more complex endpoint types.
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name");
const submission = app.submit("/predict", { name: "Chewbacca" });
```
The `submit` method accepts the same [`endpoint`](#endpoint) and [`payload`](#payload) arguments as `predict`.
The `submit` method does not return a promise and should not be awaited; instead, it returns an async iterator with a `cancel` method.
##### Accessing values
Iterating the submission allows you to access the events related to the submitted API request. There are two types of events that can be listened for: `"data"` updates and `"status"` updates. By default only the `"data"` event is reported, but you can listen for the `"status"` event by manually passing the events you care about when instantiating the client:
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name", {
events: ["data", "status"]
});
```
`"data"` updates are issued when the API computes a value and that value is sent to the client. The shape of the data depends on the way the API itself is constructed. This event may fire more than once if that endpoint supports emitting new values over time.
`"status"` updates are issued when the status of a request changes. This information allows you to offer feedback to users when the queue position of the request changes, or when the request changes from queued to processing.
The status payload looks like this:
```ts
interface Status {
queue: boolean;
code?: string;
success?: boolean;
stage: "pending" | "error" | "complete" | "generating";
size?: number;
position?: number;
eta?: number;
message?: string;
progress_data?: Array<{
progress: number | null;
index: number | null;
length: number | null;
unit: string | null;
desc: string | null;
}>;
time?: Date;
}
```
Usage looks like this:
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name");
const submission = app
.submit("/predict", { name: "Chewbacca" })
for await (const msg of submission) {
if (msg.type === "data") {
console.log(msg.data);
}
if (msg.type === "status") {
console.log(msg);
}
}
```
##### `cancel`
Certain types of gradio function can run repeatedly and in some cases indefinitely. The `cancel` method will stop such an endpoint and prevent the API from issuing additional updates.
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name");
const submission = app
.submit("/predict", { name: "Chewbacca" })
// later
submission.cancel();
```
#### `view_api`
The `view_api` method provides details about the API you are connected to. It returns a JavaScript object describing all named and unnamed endpoints and the values they accept and return. This method does not accept arguments.
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name");
const api_info = await app.view_api();
console.log(api_info);
```
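The returned object mirrors the "View API" page: it contains a `named_endpoints` map and an `unnamed_endpoints` map describing the parameters and return values of each endpoint. The sketch below assumes the connected app exposes a `"/predict"` endpoint; substitute an endpoint name that your own app actually lists.

```ts
import { Client } from "@gradio/client";

const app = await Client.connect("user/space-name");
const api_info = await app.view_api();

// "/predict" is only an example name; use an endpoint your app exposes
const endpoint_info = api_info.named_endpoints["/predict"];
console.log(endpoint_info.parameters); // what the endpoint accepts
console.log(endpoint_info.returns); // what the endpoint returns
```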
#### `config`
The `config` property contains the configuration for the gradio application you are connected to. This object may contain useful meta information about the application.
```ts
import { Client } from "@gradio/client";
const app = await Client.connect("user/space-name");
console.log(app.config);
```
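For example, the config includes the resolved `root` URL that the client makes requests against, which can be handy when building links to resources served by the app. A minimal sketch, assuming the connection succeeded and the config has been populated:

```ts
import { Client } from "@gradio/client";

const app = await Client.connect("user/space-name");

// `config` is undefined until the client has finished connecting
if (app.config) {
  console.log(app.config.root); // base URL used for API requests
}
```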
### `duplicate`
The duplicate function will attempt to duplicate the space that is referenced and return an instance of `Client` connected to that space. If the space has already been duplicated then it will not create a new duplicate and will instead connect to the existing duplicated space. The Hugging Face token that is passed in will dictate the user under which the space is created.
`duplicate` accepts the same arguments as `Client.connect`, with the addition of a `private` options property dictating whether the duplicated space should be private or public. A Hugging Face token is required for duplication to work.
```ts
import { Client } from "@gradio/client";
const app = await Client.duplicate("user/space-name", {
hf_token: "hf_..."
});
```
This function accepts two arguments: `source` and `options`:
#### `source`
The space to duplicate and connect to. [See `Client.connect`'s `source` parameter](#source).
#### `options`
Accepts all options that `Client.connect` accepts, except `hf_token` is required. [See `Client.connect`'s `options` parameter](#options).
`duplicate` also accepts one additional `options` property.
##### `private`
This is an optional property specific to `duplicate`'s options object and will determine whether the space should be public or private. Spaces duplicated via the `duplicate` method are public by default.
```ts
import { Client } from "@gradio/client";
const app = await Client.duplicate("user/space-name", {
hf_token: "hf_...",
private: true
});
```
##### `timeout`
This is an optional property specific to `duplicate`'s options object and will set the timeout in minutes before the duplicated space will go to sleep.
```ts
import { Client } from "@gradio/client";
const app = await Client.duplicate("user/space-name", {
hf_token: "hf_...",
private: true,
timeout: 5
});
```
##### `hardware`
This is an optional property specific to `duplicate`'s options object and will set the hardware for the duplicated space. By default the hardware used will match that of the original space. If this cannot be obtained it will default to `"cpu-basic"`. For hardware upgrades (beyond the basic CPU tier), you may be required to provide [billing information on Hugging Face](https://huggingface.co/settings/billing).
Possible hardware options are:
- `"cpu-basic"`
- `"cpu-upgrade"`
- `"cpu-xl"`
- `"t4-small"`
- `"t4-medium"`
- `"a10g-small"`
- `"a10g-large"`
- `"a10g-largex2"`
- `"a10g-largex4"`
- `"a100-large"`
- `"zero-a10g"`
- `"h100"`
- `"h100x8"`
```ts
import { Client } from "@gradio/client";
const app = await Client.duplicate("user/space-name", {
hf_token: "hf_...",
private: true,
hardware: "a10g-small"
});
```
### `handle_file(file_or_url: File | string | Blob | Buffer)`
This utility function is used to simplify the process of handling file inputs for the client.
Gradio APIs expect a special file data structure that references a location on the server. These files can be uploaded manually, but figuring out what to do with different file types can be difficult depending on your environment.
This function will handle files regardless of whether or not they are local files (node only), URLs, Blobs, or Buffers. It will take in a reference and handle it accordingly, uploading the file where appropriate and generating the correct data structure for the client.
The return value of this function can be used anywhere in the input data where a file is expected:
```ts
import { handle_file } from "@gradio/client";
const app = await Client.connect("user/space-name");
const result = await app.predict("/predict", {
single: handle_file(file),
flat: [handle_file(url), handle_file(buffer)],
nested: {
image: handle_file(url),
layers: [handle_file(buffer)]
},
deeply_nested: {
image: handle_file(url),
layers: [{
layer1: handle_file(buffer),
layer2: handle_file(buffer)
}]
}
});
```
#### filepaths
`handle_file` can be passed a local filepath which it will upload to the client server and return a reference that the client can understand.
This only works in a node environment.
Filepaths are resolved relative to the current working directory, not the location of the file that calls `handle_file`.
```ts
import { handle_file } from "@gradio/client";
// not uploaded yet
const file_ref = handle_file("path/to/file");
const app = await Client.connect("user/space-name");
// upload happens here
const result = await app.predict("/predict", {
file: file_ref,
});
```
#### URLs
`handle_file` can be passed a URL which it will convert into a reference that the client can understand.
```ts
import { handle_file } from "@gradio/client";
const url_ref = handle_file("https://example.com/file.png");
const app = await Client.connect("user/space-name");
const result = await app.predict("/predict", {
url: url_ref,
});
```
#### Blobs
`handle_file` can be passed a Blob which it will upload to the client server and return a reference that the client can understand.
The upload is not initiated until `predict` or `submit` is called.
```ts
import { handle_file } from "@gradio/client";
// not uploaded yet
const blob_ref = handle_file(new Blob(["Hello, world!"]));
const app = await Client.connect("user/space-name");
// upload happens here
const result = await app.predict("/predict", {
blob: blob_ref,
});
```
#### Buffers
`handle_file` can be passed a Buffer which it will upload to the client server and return a reference that the client can understand.
```ts
import { handle_file } from "@gradio/client";
import { readFileSync } from "fs";
// not uploaded yet
const buffer_ref = handle_file(readFileSync("file.png"));
const app = await Client.connect("user/space-name");
// upload happens here
const result = await app.predict("/predict", {
buffer: buffer_ref,
});
```

Binary file not shown.

View File

@ -0,0 +1,39 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Client</title>
<script type="module">
import { Client } from "./dist/index.js";
console.log(Client);
const client = await Client.connect("pngwn/chatinterface_streaming_echo");
async function run(message, n) {
// console.log(client);
const req = client.submit("/chat", {
message
});
console.log("start");
for await (const c of req) {
if (c.type === "data") {
console.log(`${n}: ${c.data[0]}`);
}
}
console.log("end");
return "hi";
}
run("My name is frank", 1);
run("Hello there", 2);
console.log("boo");
</script>
</head>
<body>
<div id="app"></div>
</body>
</html>

View File

@ -0,0 +1,49 @@
{
"name": "@gradio/client",
"version": "1.15.4",
"description": "Gradio API client",
"type": "module",
"main": "dist/index.js",
"author": "",
"license": "ISC",
"exports": {
".": {
"gradio": "./src/index.ts",
"import": "./dist/index.js"
},
"./package.json": "./package.json"
},
"dependencies": {
"@types/eventsource": "^1.1.15",
"bufferutil": "^4.0.7",
"eventsource": "^2.0.2",
"fetch-event-stream": "^0.1.5",
"msw": "^2.2.1",
"semiver": "^1.1.0",
"textlinestream": "^1.1.1",
"typescript": "^5.0.0",
"ws": "^8.13.0"
},
"devDependencies": {
"@types/ws": "^8.5.10",
"esbuild": "^0.21.0"
},
"scripts": {
"bundle": "vite build --ssr",
"generate_types": "tsc",
"build": "pnpm bundle && pnpm generate_types",
"test": "pnpm test:client && pnpm test:client:node",
"test:client": "vitest run -c vite.config.js",
"test:client:node": "TEST_MODE=node vitest run -c vite.config.js",
"preview:browser": "vite dev --mode=preview"
},
"engines": {
"node": ">=18.0.0"
},
"main_changeset": true,
"repository": {
"type": "git",
"url": "git+https://github.com/gradio-app/gradio.git",
"directory": "client/js"
}
}

View File

@ -0,0 +1,586 @@
import type {
ApiData,
ApiInfo,
ClientOptions,
Config,
DuplicateOptions,
EndpointInfo,
JsApiData,
PredictReturn,
SpaceStatus,
Status,
UploadResponse,
client_return,
SubmitIterable,
GradioEvent
} from "./types";
import { view_api } from "./utils/view_api";
import { upload_files } from "./utils/upload_files";
import { upload, FileData } from "./upload";
import { handle_blob } from "./utils/handle_blob";
import { post_data } from "./utils/post_data";
import { predict } from "./utils/predict";
import { duplicate } from "./utils/duplicate";
import { submit } from "./utils/submit";
import { RE_SPACE_NAME, process_endpoint } from "./helpers/api_info";
import {
map_names_to_ids,
resolve_cookies,
resolve_config,
get_jwt,
parse_and_set_cookies
} from "./helpers/init_helpers";
import { check_and_wake_space, check_space_status } from "./helpers/spaces";
import { open_stream, readable_stream, close_stream } from "./utils/stream";
import {
API_INFO_ERROR_MSG,
CONFIG_ERROR_MSG,
HEARTBEAT_URL,
COMPONENT_SERVER_URL
} from "./constants";
export class Client {
app_reference: string;
options: ClientOptions;
deep_link: string | null = null;
config: Config | undefined;
api_prefix = "";
api_info: ApiInfo<JsApiData> | undefined;
api_map: Record<string, number> = {};
session_hash: string = Math.random().toString(36).substring(2);
jwt: string | false = false;
last_status: Record<string, Status["stage"]> = {};
private cookies: string | null = null;
// streaming
stream_status = { open: false };
closed = false;
pending_stream_messages: Record<string, any[][]> = {};
pending_diff_streams: Record<string, any[][]> = {};
event_callbacks: Record<string, (data?: unknown) => Promise<void>> = {};
unclosed_events: Set<string> = new Set();
heartbeat_event: EventSource | null = null;
abort_controller: AbortController | null = null;
stream_instance: EventSource | null = null;
current_payload: any;
ws_map: Record<string, WebSocket | "pending" | "failed" | "closed"> = {};
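// Map a full page URL onto the page-specific config by comparing its path with the app's root path.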
get_url_config(url: string | null = null): Config {
if (!this.config) {
throw new Error(CONFIG_ERROR_MSG);
}
if (url === null) {
url = window.location.href;
}
const stripSlashes = (str: string): string => str.replace(/^\/+|\/+$/g, "");
let root_path = stripSlashes(new URL(this.config.root).pathname);
let url_path = stripSlashes(new URL(url).pathname);
let page: string;
if (!url_path.startsWith(root_path)) {
page = "";
} else {
page = stripSlashes(url_path.substring(root_path.length));
}
return this.get_page_config(page);
}
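// Return a copy of the config narrowed to the layout, components and dependencies of a single page.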
get_page_config(page: string): Config {
if (!this.config) {
throw new Error(CONFIG_ERROR_MSG);
}
let config = this.config;
if (!(page in config.page)) {
page = "";
}
return {
...config,
current_page: page,
layout: config.page[page].layout,
components: config.components.filter((c) =>
config.page[page].components.includes(c.id)
),
dependencies: this.config.dependencies.filter((d) =>
config.page[page].dependencies.includes(d.id)
)
};
}
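// fetch wrapper that attaches any stored cookies and user-supplied headers to every request.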
fetch(input: RequestInfo | URL, init?: RequestInit): Promise<Response> {
const headers = new Headers(init?.headers || {});
if (this && this.cookies) {
headers.append("Cookie", this.cookies);
}
if (this && this.options.headers) {
for (const name in this.options.headers) {
headers.append(name, this.options.headers[name]);
}
}
return fetch(input, { ...init, headers });
}
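// Open a server-sent event stream to `url`, forwarding cookies, custom headers and the hf_token (if provided).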
stream(url: URL): EventSource {
const headers = new Headers();
if (this && this.cookies) {
headers.append("Cookie", this.cookies);
}
if (this && this.options.headers) {
for (const name in this.options.headers) {
headers.append(name, this.options.headers[name]);
}
}
if (this && this.options.hf_token) {
headers.append("Authorization", `Bearer ${this.options.hf_token}`);
}
this.abort_controller = new AbortController();
this.stream_instance = readable_stream(url.toString(), {
credentials: "include",
headers: headers,
signal: this.abort_controller.signal
});
return this.stream_instance;
}
view_api: () => Promise<ApiInfo<JsApiData>>;
upload_files: (
root_url: string,
files: (Blob | File)[],
upload_id?: string
) => Promise<UploadResponse>;
upload: (
file_data: FileData[],
root_url: string,
upload_id?: string,
max_file_size?: number
) => Promise<(FileData | null)[] | null>;
handle_blob: (
endpoint: string,
data: unknown[],
endpoint_info: EndpointInfo<ApiData | JsApiData>
) => Promise<unknown[]>;
post_data: (
url: string,
body: unknown,
additional_headers?: any
) => Promise<unknown[]>;
submit: (
endpoint: string | number,
data: unknown[] | Record<string, unknown> | undefined,
event_data?: unknown,
trigger_id?: number | null,
all_events?: boolean
) => SubmitIterable<GradioEvent>;
predict: (
endpoint: string | number,
data: unknown[] | Record<string, unknown> | undefined,
event_data?: unknown
) => Promise<PredictReturn>;
open_stream: () => Promise<void>;
private resolve_config: (endpoint: string) => Promise<Config | undefined>;
private resolve_cookies: () => Promise<void>;
constructor(
app_reference: string,
options: ClientOptions = { events: ["data"] }
) {
this.app_reference = app_reference;
this.deep_link = options.query_params?.deep_link || null;
if (!options.events) {
options.events = ["data"];
}
this.options = options;
this.current_payload = {};
this.view_api = view_api.bind(this);
this.upload_files = upload_files.bind(this);
this.handle_blob = handle_blob.bind(this);
this.post_data = post_data.bind(this);
this.submit = submit.bind(this);
this.predict = predict.bind(this);
this.open_stream = open_stream.bind(this);
this.resolve_config = resolve_config.bind(this);
this.resolve_cookies = resolve_cookies.bind(this);
this.upload = upload.bind(this);
this.fetch = this.fetch.bind(this);
this.handle_space_success = this.handle_space_success.bind(this);
this.stream = this.stream.bind(this);
}
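// Resolve the app config, set up auth cookies if needed, connect the heartbeat and fetch API info.
// In Node environments a WebSocket polyfill is loaded first.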
private async init(): Promise<void> {
if (
(typeof window === "undefined" || !("WebSocket" in window)) &&
!global.WebSocket
) {
const ws = await import("ws");
global.WebSocket = ws.WebSocket as unknown as typeof WebSocket;
}
if (this.options.auth) {
await this.resolve_cookies();
}
await this._resolve_config().then(({ config }) =>
this._resolve_hearbeat(config)
);
this.api_info = await this.view_api();
this.api_map = map_names_to_ids(this.config?.dependencies || []);
}
async _resolve_hearbeat(_config: Config): Promise<void> {
if (_config) {
this.config = _config;
this.api_prefix = _config.api_prefix || "";
if (this.config && this.config.connect_heartbeat) {
if (this.config.space_id && this.options.hf_token) {
this.jwt = await get_jwt(
this.config.space_id,
this.options.hf_token,
this.cookies
);
}
}
}
if (_config.space_id && this.options.hf_token) {
this.jwt = await get_jwt(_config.space_id, this.options.hf_token);
}
if (this.config && this.config.connect_heartbeat) {
// connect to the heartbeat endpoint via GET request
const heartbeat_url = new URL(
`${this.config.root}${this.api_prefix}/${HEARTBEAT_URL}/${this.session_hash}`
);
// if the jwt is available, add it to the query params
if (this.jwt) {
heartbeat_url.searchParams.set("__sign", this.jwt);
}
// Just connect to the endpoint without parsing the response. Ref: https://github.com/gradio-app/gradio/pull/7974#discussion_r1557717540
if (!this.heartbeat_event) {
this.heartbeat_event = this.stream(heartbeat_url);
}
}
}
static async connect(
app_reference: string,
options: ClientOptions = {
events: ["data"]
}
): Promise<Client> {
const client = new this(app_reference, options); // this refers to the class itself, not the instance
if (options.session_hash) {
client.session_hash = options.session_hash;
}
await client.init();
return client;
}
close(): void {
this.closed = true;
close_stream(this.stream_status, this.abort_controller);
}
set_current_payload(payload: any): void {
this.current_payload = payload;
}
static async duplicate(
app_reference: string,
options: DuplicateOptions = {
events: ["data"]
}
): Promise<Client> {
return duplicate(app_reference, options);
}
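// Determine the app's host and protocol (waking the space if needed) and fetch its config.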
private async _resolve_config(): Promise<any> {
const { http_protocol, host, space_id } = await process_endpoint(
this.app_reference,
this.options.hf_token
);
const { status_callback } = this.options;
if (space_id && status_callback) {
await check_and_wake_space(space_id, status_callback);
}
let config: Config | undefined;
try {
// Create base URL
let configUrl = `${http_protocol}//${host}`;
config = await this.resolve_config(configUrl);
if (!config) {
throw new Error(CONFIG_ERROR_MSG);
}
return this.config_success(config);
} catch (e: any) {
if (space_id && status_callback) {
check_space_status(
space_id,
RE_SPACE_NAME.test(space_id) ? "space_name" : "subdomain",
this.handle_space_success
);
} else {
if (status_callback)
status_callback({
status: "error",
message: "Could not load this space.",
load_status: "error",
detail: "NOT_FOUND"
});
throw Error(e);
}
}
}
private async config_success(
_config: Config
): Promise<Config | client_return> {
this.config = _config;
this.api_prefix = _config.api_prefix || "";
if (this.config.auth_required) {
return this.prepare_return_obj();
}
try {
this.api_info = await this.view_api();
} catch (e) {
console.error(API_INFO_ERROR_MSG + (e as Error).message);
}
return this.prepare_return_obj();
}
async handle_space_success(status: SpaceStatus): Promise<Config | void> {
if (!this) {
throw new Error(CONFIG_ERROR_MSG);
}
const { status_callback } = this.options;
if (status_callback) status_callback(status);
if (status.status === "running") {
try {
this.config = await this._resolve_config();
this.api_prefix = this?.config?.api_prefix || "";
if (!this.config) {
throw new Error(CONFIG_ERROR_MSG);
}
const _config = await this.config_success(this.config);
return _config as Config;
} catch (e) {
if (status_callback) {
status_callback({
status: "error",
message: "Could not load this space.",
load_status: "error",
detail: "NOT_FOUND"
});
}
throw e;
}
}
}
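// POST to the component_server endpoint so that components can invoke methods on their backend class directly.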
public async component_server(
component_id: number,
fn_name: string,
data: unknown[] | { binary: boolean; data: Record<string, any> }
): Promise<unknown> {
if (!this.config) {
throw new Error(CONFIG_ERROR_MSG);
}
const headers: {
Authorization?: string;
"Content-Type"?: "application/json";
} = {};
const { hf_token } = this.options;
const { session_hash } = this;
if (hf_token) {
headers.Authorization = `Bearer ${this.options.hf_token}`;
}
let root_url: string;
let component = this.config.components.find(
(comp) => comp.id === component_id
);
if (component?.props?.root_url) {
root_url = component.props.root_url;
} else {
root_url = this.config.root;
}
let body: FormData | string;
if ("binary" in data) {
body = new FormData();
for (const key in data.data) {
if (key === "binary") continue;
body.append(key, data.data[key]);
}
body.set("component_id", component_id.toString());
body.set("fn_name", fn_name);
body.set("session_hash", session_hash);
} else {
body = JSON.stringify({
data: data,
component_id,
fn_name,
session_hash
});
headers["Content-Type"] = "application/json";
}
if (hf_token) {
headers.Authorization = `Bearer ${hf_token}`;
}
try {
const response = await this.fetch(
`${root_url}${this.api_prefix}/${COMPONENT_SERVER_URL}/`,
{
method: "POST",
body: body,
headers,
credentials: "include"
}
);
if (!response.ok) {
throw new Error(
"Could not connect to component server: " + response.statusText
);
}
const output = await response.json();
return output;
} catch (e) {
console.warn(e);
}
}
public set_cookies(raw_cookies: string): void {
this.cookies = parse_and_set_cookies(raw_cookies).join("; ");
}
private prepare_return_obj(): client_return {
return {
config: this.config,
predict: this.predict,
submit: this.submit,
view_api: this.view_api,
component_server: this.component_server
};
}
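// Open a WebSocket for `url`, tracking its lifecycle in ws_map ("pending" | "failed" | "closed" | instance).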
private async connect_ws(url: string): Promise<void> {
return new Promise((resolve, reject) => {
let ws;
try {
ws = new WebSocket(url);
} catch (e) {
this.ws_map[url] = "failed";
return;
}
this.ws_map[url] = "pending";
ws.onopen = () => {
this.ws_map[url] = ws;
resolve();
};
ws.onerror = (error) => {
console.error("WebSocket error:", error);
this.close_ws(url);
this.ws_map[url] = "failed";
resolve();
};
ws.onclose = () => {
this.ws_map[url] = "closed";
};
ws.onmessage = (event) => {};
});
}
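// Send `data` over the WebSocket for `url`, connecting on first use; falls back to an HTTP POST if the
// connection attempt fails, and drops messages while a socket is pending, closed or failed.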
async send_ws_message(url: string, data: any): Promise<void> {
// connect if not connected
if (!(url in this.ws_map)) {
await this.connect_ws(url);
} else if (
this.ws_map[url] === "pending" ||
this.ws_map[url] === "closed" ||
this.ws_map[url] === "failed"
) {
return;
}
const ws = this.ws_map[url];
if (ws instanceof WebSocket) {
ws.send(JSON.stringify(data));
} else {
this.post_data(url, data);
}
}
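// Close and remove the WebSocket associated with `url`, if one is open.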
async close_ws(url: string): Promise<void> {
if (url in this.ws_map) {
const ws = this.ws_map[url];
if (ws instanceof WebSocket) {
ws.close();
delete this.ws_map[url];
}
}
}
}
/**
* @deprecated This method will be removed in v1.0. Use `Client.connect()` instead.
* Creates a client instance for interacting with Gradio apps.
*
* @param {string} app_reference - The reference or URL to a Gradio space or app.
* @param {ClientOptions} options - Configuration options for the client.
* @returns {Promise<Client>} A promise that resolves to a `Client` instance.
*/
export async function client(
app_reference: string,
options: ClientOptions = {
events: ["data"]
}
): Promise<Client> {
return await Client.connect(app_reference, options);
}
/**
* @deprecated This method will be removed in v1.0. Use `Client.duplicate()` instead.
* Creates a duplicate of a space and returns a client instance for the duplicated space.
*
* @param {string} app_reference - The reference or URL to a Gradio space or app to duplicate.
* @param {DuplicateOptions} options - Configuration options for the client.
* @returns {Promise<Client>} A promise that resolves to a `Client` instance.
*/
export async function duplicate_space(
app_reference: string,
options: DuplicateOptions
): Promise<Client> {
return await Client.duplicate(app_reference, options);
}
export type ClientInstance = Client;

View File

@ -0,0 +1,40 @@
// endpoints
export const HOST_URL = `host`;
export const API_URL = `predict/`;
export const SSE_URL_V0 = `queue/join`;
export const SSE_DATA_URL_V0 = `queue/data`;
export const SSE_URL = `queue/data`;
export const SSE_DATA_URL = `queue/join`;
export const UPLOAD_URL = `upload`;
export const LOGIN_URL = `login`;
export const CONFIG_URL = `config`;
export const API_INFO_URL = `info`;
export const RUNTIME_URL = `runtime`;
export const SLEEPTIME_URL = `sleeptime`;
export const HEARTBEAT_URL = `heartbeat`;
export const COMPONENT_SERVER_URL = `component_server`;
export const RESET_URL = `reset`;
export const CANCEL_URL = `cancel`;
export const RAW_API_INFO_URL = `info?serialize=False`;
export const SPACE_FETCHER_URL =
"https://gradio-space-api-fetcher-v2.hf.space/api";
export const SPACE_URL = "https://hf.space/{}";
// messages
export const QUEUE_FULL_MSG =
"This application is currently busy. Please try again. ";
export const BROKEN_CONNECTION_MSG = "Connection errored out. ";
export const CONFIG_ERROR_MSG = "Could not resolve app config. ";
export const SPACE_STATUS_ERROR_MSG = "Could not get space status. ";
export const API_INFO_ERROR_MSG = "Could not get API info. ";
export const SPACE_METADATA_ERROR_MSG = "Space metadata could not be loaded. ";
export const INVALID_URL_MSG = "Invalid URL. A full URL path is required.";
export const UNAUTHORIZED_MSG = "Not authorized to access this space. ";
export const INVALID_CREDENTIALS_MSG = "Invalid credentials. Could not login. ";
export const MISSING_CREDENTIALS_MSG =
"Login credentials are required to access this space.";
export const NODEJS_FS_ERROR_MSG =
"File system access is only available in Node.js environments";
export const ROOT_URL_ERROR_MSG = "Root URL not found in client config";
export const FILE_PROCESSING_ERROR_MSG = "Error uploading file";

View File

@ -0,0 +1,12 @@
import { ApiData, ApiInfo, Config } from "./types";
declare global {
interface Window {
__gradio_mode__: "app" | "website";
gradio_config: Config;
gradio_api_info: ApiInfo<ApiData> | { api: ApiInfo<ApiData> };
__is_colab__: boolean;
__gradio_space__: string | null;
supports_zerogpu_headers?: boolean;
}
}

View File

@ -0,0 +1,464 @@
import {
HOST_URL,
INVALID_URL_MSG,
QUEUE_FULL_MSG,
SPACE_METADATA_ERROR_MSG
} from "../constants";
import type {
ApiData,
ApiInfo,
Config,
JsApiData,
EndpointInfo,
Status
} from "../types";
import { determine_protocol } from "./init_helpers";
export const RE_SPACE_NAME = /^[a-zA-Z0-9_\-\.]+\/[a-zA-Z0-9_\-\.]+$/;
export const RE_SPACE_DOMAIN = /.*hf\.space\/{0,1}.*$/;
export async function process_endpoint(
app_reference: string,
hf_token?: `hf_${string}`
): Promise<{
space_id: string | false;
host: string;
ws_protocol: "ws" | "wss";
http_protocol: "http:" | "https:";
}> {
const headers: { Authorization?: string } = {};
if (hf_token) {
headers.Authorization = `Bearer ${hf_token}`;
}
const _app_reference = app_reference.trim().replace(/\/$/, "");
if (RE_SPACE_NAME.test(_app_reference)) {
// app_reference is a HF space name
try {
const res = await fetch(
`https://huggingface.co/api/spaces/${_app_reference}/${HOST_URL}`,
{ headers }
);
const _host = (await res.json()).host;
return {
space_id: app_reference,
...determine_protocol(_host)
};
} catch (e) {
throw new Error(SPACE_METADATA_ERROR_MSG);
}
}
if (RE_SPACE_DOMAIN.test(_app_reference)) {
// app_reference is a direct HF space domain
const { ws_protocol, http_protocol, host } =
determine_protocol(_app_reference);
return {
space_id: host.split("/")[0].replace(".hf.space", ""),
ws_protocol,
http_protocol,
host
};
}
return {
space_id: false,
...determine_protocol(_app_reference)
};
}
export const join_urls = (...urls: string[]): string => {
try {
return urls.reduce((base_url: string, part: string) => {
base_url = base_url.replace(/\/+$/, "");
part = part.replace(/^\/+/, "");
return new URL(part, base_url + "/").toString();
});
} catch (e) {
throw new Error(INVALID_URL_MSG);
}
};
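// Convert raw API info from the server into JS-friendly types, injecting hidden state parameters
// where a dependency's inputs include state components.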
export function transform_api_info(
api_info: ApiInfo<ApiData>,
config: Config,
api_map: Record<string, number>
): ApiInfo<JsApiData> {
const transformed_info: ApiInfo<JsApiData> = {
named_endpoints: {},
unnamed_endpoints: {}
};
Object.keys(api_info).forEach((category) => {
if (category === "named_endpoints" || category === "unnamed_endpoints") {
transformed_info[category] = {};
Object.entries(api_info[category]).forEach(
([endpoint, { parameters, returns }]) => {
const dependencyIndex =
config.dependencies.find(
(dep) =>
dep.api_name === endpoint ||
dep.api_name === endpoint.replace("/", "")
)?.id ||
api_map[endpoint.replace("/", "")] ||
-1;
const dependencyTypes =
dependencyIndex !== -1
? config.dependencies.find((dep) => dep.id == dependencyIndex)
?.types
: { generator: false, cancel: false };
if (
dependencyIndex !== -1 &&
config.dependencies.find((dep) => dep.id == dependencyIndex)?.inputs
?.length !== parameters.length
) {
const components = config.dependencies
.find((dep) => dep.id == dependencyIndex)!
.inputs.map(
(input) => config.components.find((c) => c.id === input)?.type
);
try {
components.forEach((comp, idx) => {
if (comp === "state") {
const new_param = {
component: "state",
example: null,
parameter_default: null,
parameter_has_default: true,
parameter_name: null,
hidden: true
};
// @ts-ignore
parameters.splice(idx, 0, new_param);
}
});
} catch (e) {
console.error(e);
}
}
const transform_type = (
data: ApiData,
component: string,
serializer: string,
signature_type: "return" | "parameter"
): JsApiData => ({
...data,
description: get_description(data?.type, serializer),
type:
get_type(data?.type, component, serializer, signature_type) || ""
});
transformed_info[category][endpoint] = {
parameters: parameters.map((p: ApiData) =>
transform_type(p, p?.component, p?.serializer, "parameter")
),
returns: returns.map((r: ApiData) =>
transform_type(r, r?.component, r?.serializer, "return")
),
type: dependencyTypes
};
}
);
}
});
return transformed_info;
}
export function get_type(
type: { type: any; description: string },
component: string,
serializer: string,
signature_type: "return" | "parameter"
): string | undefined {
if (component === "Api") return type.type;
switch (type?.type) {
case "string":
return "string";
case "boolean":
return "boolean";
case "number":
return "number";
}
if (
serializer === "JSONSerializable" ||
serializer === "StringSerializable"
) {
return "any";
} else if (serializer === "ListStringSerializable") {
return "string[]";
} else if (component === "Image") {
return signature_type === "parameter" ? "Blob | File | Buffer" : "string";
} else if (serializer === "FileSerializable") {
if (type?.type === "array") {
return signature_type === "parameter"
? "(Blob | File | Buffer)[]"
: `{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}[]`;
}
return signature_type === "parameter"
? "Blob | File | Buffer"
: `{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}`;
} else if (serializer === "GallerySerializable") {
return signature_type === "parameter"
? "[(Blob | File | Buffer), (string | null)][]"
: `[{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}, (string | null)][]`;
}
}
export function get_description(
type: { type: any; description: string },
serializer: string
): string {
if (serializer === "GallerySerializable") {
return "array of [file, label] tuples";
} else if (serializer === "ListStringSerializable") {
return "array of strings";
} else if (serializer === "FileSerializable") {
return "array of files or single file";
}
return type?.description;
}
/* eslint-disable complexity */
export function handle_message(
data: any,
last_status: Status["stage"]
): {
type:
| "hash"
| "data"
| "update"
| "complete"
| "generating"
| "log"
| "none"
| "heartbeat"
| "streaming"
| "unexpected_error";
data?: any;
status?: Status;
original_msg?: string;
} {
const queue = true;
switch (data.msg) {
case "send_data":
return { type: "data" };
case "send_hash":
return { type: "hash" };
case "queue_full":
return {
type: "update",
status: {
queue,
message: QUEUE_FULL_MSG,
stage: "error",
code: data.code,
success: data.success
}
};
case "heartbeat":
return {
type: "heartbeat"
};
case "unexpected_error":
return {
type: "unexpected_error",
status: {
queue,
message: data.message,
stage: "error",
success: false
}
};
case "estimation":
return {
type: "update",
status: {
queue,
stage: last_status || "pending",
code: data.code,
size: data.queue_size,
position: data.rank,
eta: data.rank_eta,
success: data.success
}
};
case "progress":
return {
type: "update",
status: {
queue,
stage: "pending",
code: data.code,
progress_data: data.progress_data,
success: data.success
}
};
case "log":
return { type: "log", data: data };
case "process_generating":
return {
type: "generating",
status: {
queue,
message: !data.success ? data.output.error : null,
stage: data.success ? "generating" : "error",
code: data.code,
progress_data: data.progress_data,
eta: data.average_duration,
changed_state_ids: data.success
? data.output.changed_state_ids
: undefined
},
data: data.success ? data.output : null
};
case "process_streaming":
return {
type: "streaming",
status: {
queue,
message: data.output.error,
stage: "streaming",
time_limit: data.time_limit,
code: data.code,
progress_data: data.progress_data,
eta: data.eta
},
data: data.output
};
case "process_completed":
if ("error" in data.output) {
return {
type: "update",
status: {
queue,
title: data.output.title as string,
message: data.output.error as string,
visible: data.output.visible as boolean,
duration: data.output.duration as number,
stage: "error",
code: data.code,
success: data.success
}
};
}
return {
type: "complete",
status: {
queue,
message: !data.success ? data.output.error : undefined,
stage: data.success ? "complete" : "error",
code: data.code,
progress_data: data.progress_data,
changed_state_ids: data.success
? data.output.changed_state_ids
: undefined
},
data: data.success ? data.output : null
};
case "process_starts":
return {
type: "update",
status: {
queue,
stage: "pending",
code: data.code,
size: data.rank,
position: 0,
success: data.success,
eta: data.eta
},
original_msg: "process_starts"
};
}
return { type: "none", status: { stage: "error", queue } };
}
/* eslint-enable complexity */
/**
* Maps the provided `data` to the parameters defined by the `/info` endpoint response.
* This allows us to support both positional and keyword arguments passed to the client
* and ensures that all parameters are either directly provided or have default values assigned.
*
* @param {unknown[] | Record<string, unknown>} data - The input data for the function,
* which can be either an array of values for positional arguments or an object
* with key-value pairs for keyword arguments.
* @param {JsApiData[]} parameters - Array of parameter descriptions retrieved from the
* `/info` endpoint.
*
* @returns {unknown[]} - Returns an array of resolved data where each element corresponds
* to the expected parameter from the API. The `parameter_default` value is used where
* a value is not provided for a parameter, and optional parameters without defaults are
* set to `undefined`.
*
* @throws {Error} - Throws an error:
* - If no parameter value is provided for a required parameter and no default value is defined.
* - If a keyword argument is provided that does not match any defined parameter.
* Note: if more positional arguments are provided than the endpoint defines, a console warning is logged rather than an error thrown.
*/
export const map_data_to_params = (
data: unknown[] | Record<string, unknown> = [],
endpoint_info: EndpointInfo<JsApiData | ApiData>
): unknown[] => {
// Workaround for the case where the endpoint_info is undefined
// See https://github.com/gradio-app/gradio/pull/8820#issuecomment-2237381761
const parameters = endpoint_info ? endpoint_info.parameters : [];
if (Array.isArray(data)) {
if (
endpoint_info &&
parameters.length > 0 &&
data.length > parameters.length
) {
console.warn("Too many arguments provided for the endpoint.");
}
return data;
}
const resolved_data: unknown[] = [];
const provided_keys = Object.keys(data);
parameters.forEach((param, index) => {
if (data.hasOwnProperty(param.parameter_name)) {
resolved_data[index] = data[param.parameter_name];
} else if (param.parameter_has_default) {
resolved_data[index] = param.parameter_default;
} else {
throw new Error(
`No value provided for required parameter: ${param.parameter_name}`
);
}
});
provided_keys.forEach((key) => {
if (!parameters.some((param) => param.parameter_name === key)) {
throw new Error(
`Parameter \`${key}\` is not a valid keyword argument. Please refer to the API for usage.`
);
}
});
resolved_data.forEach((value, idx) => {
if (value === undefined && !parameters[idx].parameter_has_default) {
throw new Error(
`No value provided for required parameter: ${parameters[idx].parameter_name}`
);
}
});
return resolved_data;
};
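// Usage sketch (illustrative, not part of the original file): keyword
// arguments are resolved into the positional order declared by the endpoint,
// falling back to `parameter_default` where one exists. The endpoint shape
// below is a hypothetical, minimal `/info` response.
function example_map_data_to_params(): unknown[] {
	const endpoint = {
		parameters: [
			{ parameter_name: "prompt", parameter_has_default: false },
			{ parameter_name: "steps", parameter_has_default: true, parameter_default: 20 }
		]
	} as unknown as EndpointInfo<JsApiData>;
	// => ["a cat", 20]; `steps` falls back to its default value.
	return map_data_to_params({ prompt: "a cat" }, endpoint);
}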

View File

@ -0,0 +1,221 @@
import {
type ApiData,
type BlobRef,
type Config,
type EndpointInfo,
type JsApiData,
type DataType,
Command,
type Dependency,
type ComponentMeta
} from "../types";
import { FileData } from "../upload";
const is_node =
typeof process !== "undefined" && process.versions && process.versions.node;
export function update_object(
object: { [x: string]: any },
newValue: any,
stack: (string | number)[]
): void {
while (stack.length > 1) {
const key = stack.shift();
if (typeof key === "string" || typeof key === "number") {
object = object[key];
} else {
throw new Error("Invalid key type");
}
}
const key = stack.shift();
if (typeof key === "string" || typeof key === "number") {
object[key] = newValue;
} else {
throw new Error("Invalid key type");
}
}
export async function walk_and_store_blobs(
data: DataType,
type: string | undefined = undefined,
path: string[] = [],
root = false,
endpoint_info: EndpointInfo<ApiData | JsApiData> | undefined = undefined
): Promise<BlobRef[]> {
if (Array.isArray(data)) {
let blob_refs: BlobRef[] = [];
await Promise.all(
data.map(async (_, index) => {
let new_path = path.slice();
new_path.push(String(index));
const array_refs = await walk_and_store_blobs(
data[index],
root
? endpoint_info?.parameters[index]?.component || undefined
: type,
new_path,
false,
endpoint_info
);
blob_refs = blob_refs.concat(array_refs);
})
);
return blob_refs;
} else if (
(globalThis.Buffer && data instanceof globalThis.Buffer) ||
data instanceof Blob
) {
return [
{
path: path,
blob: new Blob([data]),
type
}
];
} else if (typeof data === "object" && data !== null) {
let blob_refs: BlobRef[] = [];
for (const key of Object.keys(data) as (keyof typeof data)[]) {
const new_path = [...path, key];
const value = data[key];
blob_refs = blob_refs.concat(
await walk_and_store_blobs(
value,
undefined,
new_path,
false,
endpoint_info
)
);
}
return blob_refs;
}
return [];
}
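// Usage sketch (illustrative, not part of the original file): every Blob or
// Buffer nested anywhere in a payload is collected together with the key
// path at which it was found, so it can be uploaded separately and then
// referenced back in place.
async function example_walk_and_store_blobs(): Promise<BlobRef[]> {
	const payload = { background: new Blob(["raw bytes"]), meta: { label: "cat" } };
	// => [{ path: ["background"], blob: Blob, type: undefined }]
	return walk_and_store_blobs(payload);
}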
export function skip_queue(id: number, config: Config): boolean {
let fn_queue = config?.dependencies?.find((dep) => dep.id == id)?.queue;
if (fn_queue != null) {
return !fn_queue;
}
return !config.enable_queue;
}
/**
* Posts `message` to the parent window via `postMessage`, transferring one
* port of a `MessageChannel`, and resolves with the first response received
* back on that channel. Used to communicate with the host page when the app
* is embedded in an iframe.
*/
export function post_message<Res = any>(
message: any,
origin: string
): Promise<Res> {
return new Promise((res, _rej) => {
const channel = new MessageChannel();
channel.port1.onmessage = (({ data }) => {
channel.port1.close();
res(data);
}) as (ev: MessageEvent<Res>) => void;
window.parent.postMessage(message, origin, [channel.port2]);
});
}
export function handle_file(
file_or_url: File | string | Blob | Buffer
): FileData | Blob | Command {
if (typeof file_or_url === "string") {
if (
file_or_url.startsWith("http://") ||
file_or_url.startsWith("https://")
) {
return {
path: file_or_url,
url: file_or_url,
orig_name: file_or_url.split("/").pop() ?? "unknown",
meta: { _type: "gradio.FileData" }
};
}
if (is_node) {
// Handle local file paths
return new Command("upload_file", {
path: file_or_url,
name: file_or_url,
orig_path: file_or_url
});
}
} else if (typeof File !== "undefined" && file_or_url instanceof File) {
return new Blob([file_or_url]);
} else if (file_or_url instanceof Buffer) {
return new Blob([file_or_url]);
} else if (file_or_url instanceof Blob) {
return file_or_url;
}
throw new Error(
"Invalid input: must be a URL, File, Blob, or Buffer object."
);
}
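// Usage sketch (illustrative, not part of the original file): URLs become
// FileData references, Blobs/Files/Buffers are passed through as Blobs, and
// a plain filesystem path is only valid under Node, where it becomes an
// "upload_file" Command (in the browser it throws).
function example_handle_file(): (FileData | Blob | Command)[] {
	return [
		handle_file("https://example.com/owl.png"), // FileData pointing at the URL
		handle_file(new Blob(["raw bytes"])), // returned as a Blob unchanged
		handle_file("./owl.png") // Command("upload_file", ...) when running under Node
	];
}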
/**
* Handles the payload by filtering out state inputs and returning an array of resolved payload values.
* We send null values for state inputs to the server, but we don't want to include them in the resolved payload.
*
* @param resolved_payload - The resolved payload values received from the client or the server
* @param dependency - The dependency object.
* @param components - The array of component metadata.
* @param with_null_state - Optional. Specifies whether to include null values for state inputs. Default is false.
* @returns An array of resolved payload values, filtered based on the dependency and component metadata.
*/
export function handle_payload(
resolved_payload: unknown[],
dependency: Dependency,
components: ComponentMeta[],
type: "input" | "output",
with_null_state = false
): unknown[] {
if (type === "input" && !with_null_state) {
throw new Error("Invalid code path. Cannot skip state inputs for input.");
}
// data comes from the server with null state values so we skip
if (type === "output" && with_null_state) {
return resolved_payload;
}
let updated_payload: unknown[] = [];
let payload_index = 0;
const deps = type === "input" ? dependency.inputs : dependency.outputs;
for (let i = 0; i < deps.length; i++) {
const input_id = deps[i];
const component = components.find((c) => c.id === input_id);
if (component?.type === "state") {
// input + with_null_state needs us to fill state with null values
if (with_null_state) {
if (resolved_payload.length === deps.length) {
const value = resolved_payload[payload_index];
updated_payload.push(value);
payload_index++;
} else {
updated_payload.push(null);
}
} else {
// this is output & !with_null_state, we skip state inputs
// the server payload always comes with null state values so we move along the payload index
payload_index++;
continue;
}
// input & !with_null_state isn't a case we care about, server needs null
continue;
} else {
const value = resolved_payload[payload_index];
updated_payload.push(value);
payload_index++;
}
}
return updated_payload;
}
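// Usage sketch (illustrative, not part of the original file): when building
// an input payload the client fills `state` slots with `null`, and when
// reading an output payload it strips those slots back out. The component
// ids and types below are hypothetical.
function example_handle_payload(): unknown[][] {
	const components = [
		{ id: 1, type: "textbox" },
		{ id: 2, type: "state" }
	] as unknown as ComponentMeta[];
	const dependency = { inputs: [1, 2], outputs: [1, 2] } as unknown as Dependency;
	return [
		// input: ["hello"] => ["hello", null]
		handle_payload(["hello"], dependency, components, "input", true),
		// output: ["hello", null] => ["hello"]
		handle_payload(["hello", null], dependency, components, "output", false)
	];
}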

View File

@ -0,0 +1,222 @@
import type { Config } from "../types";
import {
CONFIG_ERROR_MSG,
CONFIG_URL,
INVALID_CREDENTIALS_MSG,
LOGIN_URL,
MISSING_CREDENTIALS_MSG,
SPACE_METADATA_ERROR_MSG,
UNAUTHORIZED_MSG
} from "../constants";
import { Client } from "..";
import { join_urls, process_endpoint } from "./api_info";
/**
* This function is used to resolve the URL for making requests when the app has a root path.
* The root path could be a path suffix like "/app" which is appended to the end of the base URL. Or
* it could be a full URL like "https://abidlabs-test-client-replica--gqf2x.hf.space" which is used when hosting
* Gradio apps on Hugging Face Spaces.
* @param {string} base_url The base URL at which the Gradio server is hosted
* @param {string} root_path The root path, which could be a path suffix (e.g. mounted in FastAPI app) or a full URL (e.g. hosted on Hugging Face Spaces)
* @param {boolean} prioritize_base Whether to prioritize the base URL over the root path. This is used when both the base path and root paths are full URLs. For example, for fetching files the root path should be prioritized, but for making requests, the base URL should be prioritized.
* @returns {string} the resolved URL
*/
export function resolve_root(
base_url: string,
root_path: string,
prioritize_base: boolean
): string {
if (root_path.startsWith("http://") || root_path.startsWith("https://")) {
return prioritize_base ? base_url : root_path;
}
return base_url + root_path;
}
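// Usage sketch (illustrative, not part of the original file; the Space URL
// below is hypothetical). A path-style root is appended to the base URL,
// while a full-URL root either wins or is ignored depending on
// `prioritize_base`.
function example_resolve_root(): string[] {
	return [
		// => "http://localhost:7860/app"
		resolve_root("http://localhost:7860", "/app", false),
		// => "https://user-my-app.hf.space" (root wins, e.g. when fetching files)
		resolve_root("http://localhost:7860", "https://user-my-app.hf.space", false),
		// => "http://localhost:7860" (base wins, e.g. when making API requests)
		resolve_root("http://localhost:7860", "https://user-my-app.hf.space", true)
	];
}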
export async function get_jwt(
space: string,
token: `hf_${string}`,
cookies?: string | null
): Promise<string | false> {
try {
const r = await fetch(`https://huggingface.co/api/spaces/${space}/jwt`, {
headers: {
Authorization: `Bearer ${token}`,
...(cookies ? { Cookie: cookies } : {})
}
});
const jwt = (await r.json()).token;
return jwt || false;
} catch (e) {
return false;
}
}
export function map_names_to_ids(
fns: Config["dependencies"]
): Record<string, number> {
let apis: Record<string, number> = {};
fns.forEach(({ api_name, id }) => {
if (api_name) apis[api_name] = id;
});
return apis;
}
export async function resolve_config(
this: Client,
endpoint: string
): Promise<Config | undefined> {
const headers: Record<string, string> = this.options.hf_token
? { Authorization: `Bearer ${this.options.hf_token}` }
: {};
headers["Content-Type"] = "application/json";
if (
typeof window !== "undefined" &&
window.gradio_config &&
location.origin !== "http://localhost:9876" &&
!window.gradio_config.dev_mode
) {
if (window.gradio_config.current_page) {
endpoint = endpoint.substring(0, endpoint.lastIndexOf("/"));
}
window.gradio_config.root = endpoint;
// @ts-ignore
return { ...window.gradio_config } as Config;
} else if (endpoint) {
let config_url = join_urls(
endpoint,
this.deep_link ? CONFIG_URL + "?deep_link=" + this.deep_link : CONFIG_URL
);
const response = await this.fetch(config_url, {
headers,
credentials: "include"
});
if (response?.status === 401 && !this.options.auth) {
throw new Error(MISSING_CREDENTIALS_MSG);
} else if (response?.status === 401 && this.options.auth) {
throw new Error(INVALID_CREDENTIALS_MSG);
}
if (response?.status === 200) {
let config = await response.json();
config.root = endpoint;
config.dependencies?.forEach((dep: any, i: number) => {
if (dep.id === undefined) {
dep.id = i;
}
});
return config;
} else if (response?.status === 401) {
throw new Error(UNAUTHORIZED_MSG);
}
throw new Error(CONFIG_ERROR_MSG);
}
throw new Error(CONFIG_ERROR_MSG);
}
export async function resolve_cookies(this: Client): Promise<void> {
const { http_protocol, host } = await process_endpoint(
this.app_reference,
this.options.hf_token
);
try {
if (this.options.auth) {
const cookie_header = await get_cookie_header(
http_protocol,
host,
this.options.auth,
this.fetch,
this.options.hf_token
);
if (cookie_header) this.set_cookies(cookie_header);
}
} catch (e: unknown) {
throw Error((e as Error).message);
}
}
// separating this from client-bound resolve_cookies so that it can be used in duplicate
export async function get_cookie_header(
http_protocol: string,
host: string,
auth: [string, string],
_fetch: typeof fetch,
hf_token?: `hf_${string}`
): Promise<string | null> {
const formData = new FormData();
formData.append("username", auth?.[0]);
formData.append("password", auth?.[1]);
let headers: { Authorization?: string } = {};
if (hf_token) {
headers.Authorization = `Bearer ${hf_token}`;
}
const res = await _fetch(`${http_protocol}//${host}/${LOGIN_URL}`, {
headers,
method: "POST",
body: formData,
credentials: "include"
});
if (res.status === 200) {
return res.headers.get("set-cookie");
} else if (res.status === 401) {
throw new Error(INVALID_CREDENTIALS_MSG);
} else {
throw new Error(SPACE_METADATA_ERROR_MSG);
}
}
export function determine_protocol(endpoint: string): {
ws_protocol: "ws" | "wss";
http_protocol: "http:" | "https:";
host: string;
} {
if (endpoint.startsWith("http")) {
const { protocol, host, pathname } = new URL(endpoint);
return {
ws_protocol: protocol === "https:" ? "wss" : "ws",
http_protocol: protocol as "http:" | "https:",
host: host + (pathname !== "/" ? pathname : "")
};
} else if (endpoint.startsWith("file:")) {
// This case is only expected to be used for the Wasm mode (Gradio-lite),
// where users can create a local HTML file using it and open the page in a browser directly via the `file:` protocol.
return {
ws_protocol: "ws",
http_protocol: "http:",
host: "lite.local" // Special fake hostname only used for this case. This matches the hostname allowed in `is_self_host()` in `js/wasm/network/host.ts`.
};
}
// default to secure if no protocol is provided
return {
ws_protocol: "wss",
http_protocol: "https:",
host: new URL(endpoint).host
};
}
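// Usage sketch (illustrative, not part of the original file; the hf.space
// host is hypothetical): the protocol pair is derived from the URL scheme,
// and any non-root path is kept as part of the host so that apps mounted
// under a subpath keep working.
function example_determine_protocol(): ReturnType<typeof determine_protocol>[] {
	return [
		// => { ws_protocol: "ws", http_protocol: "http:", host: "localhost:7860" }
		determine_protocol("http://localhost:7860"),
		// => { ws_protocol: "wss", http_protocol: "https:", host: "user-my-app.hf.space/demo" }
		determine_protocol("https://user-my-app.hf.space/demo")
	];
}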
export const parse_and_set_cookies = (cookie_header: string): string[] => {
let cookies: string[] = [];
const parts = cookie_header.split(/,(?=\s*[^\s=;]+=[^\s=;]+)/);
parts.forEach((cookie) => {
const [cookie_name, cookie_value] = cookie.split(";")[0].split("=");
if (cookie_name && cookie_value) {
cookies.push(`${cookie_name.trim()}=${cookie_value.trim()}`);
}
});
return cookies;
};

View File

@ -0,0 +1,252 @@
import {
RUNTIME_URL,
SLEEPTIME_URL,
SPACE_STATUS_ERROR_MSG
} from "../constants";
import { RE_SPACE_NAME } from "./api_info";
import type { SpaceStatusCallback } from "../types";
export async function check_space_status(
id: string,
type: "subdomain" | "space_name",
status_callback: SpaceStatusCallback
): Promise<void> {
let endpoint =
type === "subdomain"
? `https://huggingface.co/api/spaces/by-subdomain/${id}`
: `https://huggingface.co/api/spaces/${id}`;
let response;
let _status;
try {
response = await fetch(endpoint);
_status = response.status;
if (_status !== 200) {
throw new Error();
}
response = await response.json();
} catch (e) {
status_callback({
status: "error",
load_status: "error",
message: SPACE_STATUS_ERROR_MSG,
detail: "NOT_FOUND"
});
return;
}
if (!response || _status !== 200) return;
const {
runtime: { stage },
id: space_name
} = response;
switch (stage) {
case "STOPPED":
case "SLEEPING":
status_callback({
status: "sleeping",
load_status: "pending",
message: "Space is asleep. Waking it up...",
detail: stage
});
setTimeout(() => {
check_space_status(id, type, status_callback);
}, 1000); // poll for status
break;
case "PAUSED":
status_callback({
status: "paused",
load_status: "error",
message:
"This space has been paused by the author. If you would like to try this demo, consider duplicating the space.",
detail: stage,
discussions_enabled: await discussions_enabled(space_name)
});
break;
case "RUNNING":
case "RUNNING_BUILDING":
status_callback({
status: "running",
load_status: "complete",
message: "Space is running.",
detail: stage
});
break;
case "BUILDING":
status_callback({
status: "building",
load_status: "pending",
message: "Space is building...",
detail: stage
});
setTimeout(() => {
check_space_status(id, type, status_callback);
}, 1000);
break;
case "APP_STARTING":
status_callback({
status: "starting",
load_status: "pending",
message: "Space is starting...",
detail: stage
});
setTimeout(() => {
check_space_status(id, type, status_callback);
}, 1000);
break;
default:
status_callback({
status: "space_error",
load_status: "error",
message: "This space is experiencing an issue.",
detail: stage,
discussions_enabled: await discussions_enabled(space_name)
});
break;
}
}
export const check_and_wake_space = async (
space_id: string,
status_callback: SpaceStatusCallback
): Promise<void> => {
let retries = 0;
const max_retries = 12;
const check_interval = 5000;
return new Promise((resolve) => {
check_space_status(
space_id,
RE_SPACE_NAME.test(space_id) ? "space_name" : "subdomain",
(status) => {
status_callback(status);
if (status.status === "running") {
resolve();
} else if (
status.status === "error" ||
status.status === "paused" ||
status.status === "space_error"
) {
resolve();
} else if (
status.status === "sleeping" ||
status.status === "building"
) {
if (retries < max_retries) {
retries++;
setTimeout(() => {
check_and_wake_space(space_id, status_callback).then(resolve);
}, check_interval);
} else {
resolve();
}
}
}
);
});
};
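// Usage sketch (illustrative, not part of the original file; the Space name
// is hypothetical): the Space is polled until it reports "running", reaches
// an unrecoverable state ("error", "paused", "space_error"), or the retry
// budget (12 attempts, 5 seconds apart) is exhausted.
async function example_check_and_wake_space(): Promise<void> {
	await check_and_wake_space("user/my-app", (status) => {
		// e.g. { status: "sleeping", load_status: "pending", message: "Space is asleep. Waking it up...", detail: "SLEEPING" }
		console.log(status);
	});
}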
const RE_DISABLED_DISCUSSION =
/^(?=[^]*\b[dD]iscussions{0,1}\b)(?=[^]*\b[dD]isabled\b)[^]*$/;
export async function discussions_enabled(space_id: string): Promise<boolean> {
try {
const r = await fetch(
`https://huggingface.co/api/spaces/${space_id}/discussions`,
{
method: "HEAD"
}
);
const error = r.headers.get("x-error-message");
if (!r.ok || (error && RE_DISABLED_DISCUSSION.test(error))) return false;
return true;
} catch (e) {
return false;
}
}
export async function get_space_hardware(
space_id: string,
hf_token?: `hf_${string}` | undefined
): Promise<(typeof hardware_types)[number]> {
const headers: { Authorization?: string } = {};
if (hf_token) {
headers.Authorization = `Bearer ${hf_token}`;
}
try {
const res = await fetch(
`https://huggingface.co/api/spaces/${space_id}/${RUNTIME_URL}`,
{ headers }
);
if (res.status !== 200)
throw new Error("Space hardware could not be obtained.");
const { hardware } = await res.json();
return hardware.current;
} catch (e: any) {
throw new Error(e.message);
}
}
export async function set_space_timeout(
space_id: string,
timeout: number,
hf_token?: `hf_${string}`
): Promise<any> {
const headers: { Authorization?: string } = {};
if (hf_token) {
headers.Authorization = `Bearer ${hf_token}`;
}
const body: {
seconds?: number;
} = {
seconds: timeout
};
try {
const res = await fetch(
`https://huggingface.co/api/spaces/${space_id}/${SLEEPTIME_URL}`,
{
method: "POST",
headers: { "Content-Type": "application/json", ...headers },
body: JSON.stringify(body)
}
);
if (res.status !== 200) {
throw new Error(
"Could not set sleep timeout on duplicated Space. Please visit *ADD HF LINK TO SETTINGS* to set a timeout manually to reduce billing charges."
);
}
const response = await res.json();
return response;
} catch (e: any) {
throw new Error(e.message);
}
}
export const hardware_types = [
"cpu-basic",
"cpu-upgrade",
"cpu-xl",
"t4-small",
"t4-medium",
"a10g-small",
"a10g-large",
"a10g-largex2",
"a10g-largex4",
"a100-large",
"zero-a10g",
"h100",
"h100x8"
] as const;

View File

@ -0,0 +1,23 @@
export { Client } from "./client";
export { predict } from "./utils/predict";
export { submit } from "./utils/submit";
export { upload_files } from "./utils/upload_files";
export { FileData, upload, prepare_files } from "./upload";
export { handle_file } from "./helpers/data";
export type {
SpaceStatus,
StatusMessage,
Status,
client_return,
UploadResponse,
RenderMessage,
LogMessage,
Payload,
Config
} from "./types";
// todo: remove in @gradio/client v1.0
export { client } from "./client";
export { duplicate_space as duplicate } from "./client";

View File

@ -0,0 +1,660 @@
import {
INVALID_URL_MSG,
QUEUE_FULL_MSG,
SPACE_METADATA_ERROR_MSG
} from "../constants";
import { beforeAll, afterEach, afterAll, it, expect, describe } from "vitest";
import {
handle_message,
get_description,
get_type,
process_endpoint,
join_urls,
map_data_to_params
} from "../helpers/api_info";
import { initialise_server } from "./server";
import { transformed_api_info } from "./test_data";
const server = initialise_server();
beforeAll(() => server.listen());
afterEach(() => server.resetHandlers());
afterAll(() => server.close());
describe("handle_message", () => {
it("should return type 'data' when msg is 'send_data'", () => {
const data = { msg: "send_data" };
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({ type: "data" });
});
it("should return type 'hash' when msg is 'send_hash'", () => {
const data = { msg: "send_hash" };
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({ type: "hash" });
});
it("should return type 'update' with queue full message when msg is 'queue_full'", () => {
const data = { msg: "queue_full", code: 500, success: false };
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "update",
status: {
queue: true,
message: QUEUE_FULL_MSG,
stage: "error",
code: 500,
success: false
}
});
});
it("should return type 'heartbeat' when msg is 'heartbeat'", () => {
const data = { msg: "heartbeat" };
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({ type: "heartbeat" });
});
it("should return type 'unexpected_error' with error message when msg is 'unexpected_error'", () => {
const data = { msg: "unexpected_error", message: "Something went wrong" };
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "unexpected_error",
status: {
queue: true,
message: "Something went wrong",
stage: "error",
success: false
}
});
});
it("should return type 'update' with estimation status when msg is 'estimation'", () => {
const data = {
msg: "estimation",
code: 200,
queue_size: 10,
rank: 5,
rank_eta: 60,
success: true
};
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "update",
status: {
queue: true,
stage: "pending",
code: 200,
size: 10,
position: 5,
eta: 60,
success: true
}
});
});
it("should return type 'update' with progress status when msg is 'progress'", () => {
const data = {
msg: "progress",
code: 200,
progress_data: { current: 50, total: 100 },
success: true
};
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "update",
status: {
queue: true,
stage: "pending",
code: 200,
progress_data: { current: 50, total: 100 },
success: true
}
});
});
it("should return type 'log' with the provided data when msg is 'log'", () => {
const data = { msg: "log", log_data: "Some log message" };
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "log",
data: { msg: "log", log_data: "Some log message" }
});
});
it("should return type 'generating' with generating status when msg is 'process_generating' and success is true", () => {
const data = {
msg: "process_generating",
success: true,
code: 200,
progress_data: { current: 50, total: 100 },
average_duration: 120,
output: { result: "Some result" }
};
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "generating",
status: {
queue: true,
message: null,
stage: "generating",
code: 200,
progress_data: { current: 50, total: 100 },
eta: 120
},
data: { result: "Some result" }
});
});
it("should return type 'update' with error status when msg is 'process_generating' and success is false", () => {
const data = {
msg: "process_generating",
success: false,
code: 500,
progress_data: { current: 50, total: 100 },
average_duration: 120,
output: { error: "Error" }
};
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "generating",
data: null,
status: {
eta: 120,
queue: true,
message: "Error",
stage: "error",
code: 500,
progress_data: { current: 50, total: 100 }
}
});
});
it("should return type 'complete' with success status when msg is 'process_completed' and success is true", () => {
const data = {
msg: "process_completed",
success: true,
code: 200,
progress_data: { current: 100, total: 100 },
output: { result: "Some result" }
};
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "complete",
status: {
queue: true,
message: undefined,
stage: "complete",
code: 200,
progress_data: { current: 100, total: 100 }
},
data: { result: "Some result" }
});
});
it("should return type 'update' with error status when msg is 'process_completed' and success is false", () => {
const data = {
msg: "process_completed",
success: false,
code: 500,
progress_data: { current: 100, total: 100 },
output: { error: "Some error message" }
};
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "update",
status: {
queue: true,
message: "Some error message",
stage: "error",
code: 500,
success: false
}
});
});
it("should return type 'update' with pending status when msg is 'process_starts'", () => {
const data = {
msg: "process_starts",
code: 200,
rank: 5,
success: true,
eta: 60
};
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "update",
original_msg: "process_starts",
status: {
queue: true,
stage: "pending",
code: 200,
size: 5,
position: 0,
success: true,
eta: 60
}
});
});
it("should return type 'none' with error status when msg is unknown", () => {
const data = { msg: "unknown" };
const last_status = "pending";
const result = handle_message(data, last_status);
expect(result).toEqual({
type: "none",
status: { stage: "error", queue: true }
});
});
});
describe("get_description", () => {
it("should return 'array of [file, label] tuples' when serializer is 'GallerySerializable'", () => {
const type = { type: "string", description: "param description" };
const serializer = "GallerySerializable";
const result = get_description(type, serializer);
expect(result).toEqual("array of [file, label] tuples");
});
it("should return 'array of strings' when serializer is 'ListStringSerializable'", () => {
const type = { type: "string", description: "param description" };
const serializer = "ListStringSerializable";
const result = get_description(type, serializer);
expect(result).toEqual("array of strings");
});
it("should return 'array of files or single file' when serializer is 'FileSerializable'", () => {
const type = { type: "string", description: "param description" };
const serializer = "FileSerializable";
const result = get_description(type, serializer);
expect(result).toEqual("array of files or single file");
});
it("should return the type's description when serializer is not 'GallerySerializable', 'ListStringSerializable', or 'FileSerializable'", () => {
const type = { type: "string", description: "param description" };
const serializer = "SomeOtherSerializer";
const result = get_description(type, serializer);
expect(result).toEqual(type.description);
});
});
describe("get_type", () => {
it("should return 'string' when type is 'string'", () => {
const type = { type: "string", description: "param description" };
const component = "Component";
const serializer = "Serializer";
const signature_type = "parameter";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("string");
});
it("should return 'boolean' when type is 'boolean'", () => {
const type = { type: "boolean", description: "param description" };
const component = "Component";
const serializer = "Serializer";
const signature_type = "parameter";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("boolean");
});
it("should return 'number' when type is 'number'", () => {
const type = { type: "number", description: "param description" };
const component = "Component";
const serializer = "Serializer";
const signature_type = "parameter";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("number");
});
it("should return 'any' when serializer is 'JSONSerializable'", () => {
const type = { type: "any", description: "param description" };
const component = "Component";
const serializer = "JSONSerializable";
const signature_type = "parameter";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("any");
});
it("should return 'any' when serializer is 'StringSerializable'", () => {
const type = { type: "any", description: "param description" };
const component = "Component";
const serializer = "StringSerializable";
const signature_type = "parameter";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("any");
});
it("should return 'string[]' when serializer is 'ListStringSerializable'", () => {
const type = { type: "any", description: "param description" };
const component = "Component";
const serializer = "ListStringSerializable";
const signature_type = "parameter";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("string[]");
});
it("should return 'Blob | File | Buffer' when component is 'Image' and signature_type is 'parameter'", () => {
const type = { type: "any", description: "param description" };
const component = "Image";
const serializer = "Serializer";
const signature_type = "parameter";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("Blob | File | Buffer");
});
it("should return 'string' when component is 'Image' and signature_type is 'return'", () => {
const type = { type: "string", description: "param description" };
const component = "Image";
const serializer = "Serializer";
const signature_type = "return";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("string");
});
it("should return '(Blob | File | Buffer)[]' when serializer is 'FileSerializable' and type is an array and signature_type is 'parameter'", () => {
const type = { type: "array", description: "param description" };
const component = "Component";
const serializer = "FileSerializable";
const signature_type = "parameter";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("(Blob | File | Buffer)[]");
});
it("should return 'Blob | File | Buffer' when serializer is 'FileSerializable' and type is not an array and signature_type is 'return'", () => {
const type = { type: "any", description: "param description" };
const component = "Component";
const serializer = "FileSerializable";
const signature_type = "return";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual(
"{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}"
);
});
it("should return a FileData object when serializer is 'FileSerializable' and type is not an array and signature_type is 'return'", () => {
const type = { type: "any", description: "param description" };
const component = "Component";
const serializer = "FileSerializable";
const signature_type = "return";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual(
"{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}"
);
});
it("should return '[(Blob | File | Buffer), (string | null)][]' when serializer is 'GallerySerializable' and signature_type is 'parameter'", () => {
const type = { type: "any", description: "param description" };
const component = "Component";
const serializer = "GallerySerializable";
const signature_type = "parameter";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual("[(Blob | File | Buffer), (string | null)][]");
});
it("should return a FileData object when serializer is 'GallerySerializable' and signature_type is 'return'", () => {
const type = { type: "any", description: "param description" };
const component = "Component";
const serializer = "GallerySerializable";
const signature_type = "return";
const result = get_type(type, component, serializer, signature_type);
expect(result).toEqual(
"[{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}, (string | null))][]"
);
});
});
describe("process_endpoint", () => {
it("should return space_id, host, ws_protocol, and http_protocol when app_reference is a valid space name", async () => {
const app_reference = "hmb/hello_world";
const host = "hmb-hello-world.hf.space";
const hf_token = "hf_token";
const expected = {
space_id: app_reference,
host,
ws_protocol: "wss",
http_protocol: "https:"
};
const result = await process_endpoint(app_reference, hf_token);
expect(result).toEqual(expected);
});
it("should throw an error when fetching space metadata fails", async () => {
const app_reference = "hmb/bye_world";
const hf_token = "hf_token";
try {
await process_endpoint(app_reference, hf_token);
} catch (error) {
expect(error.message).toEqual(SPACE_METADATA_ERROR_MSG);
}
});
it("should return the correct data when app_reference is a valid space domain", async () => {
const app_reference = "hmb/hello_world";
const host = "hmb-hello-world.hf.space";
const expected = {
space_id: app_reference,
host,
ws_protocol: "wss",
http_protocol: "https:"
};
const result = await process_endpoint("hmb/hello_world");
expect(result).toEqual(expected);
});
it("processes local server URLs correctly", async () => {
const local_url = "http://localhost:7860/gradio";
const response_local_url = await process_endpoint(local_url);
expect(response_local_url.space_id).toBe(false);
expect(response_local_url.host).toBe("localhost:7860/gradio");
const local_url_2 = "http://localhost:7860/gradio/";
const response_local_url_2 = await process_endpoint(local_url_2);
expect(response_local_url_2.space_id).toBe(false);
expect(response_local_url_2.host).toBe("localhost:7860/gradio");
});
it("handles hugging face space references", async () => {
const space_id = "hmb/hello_world";
const response = await process_endpoint(space_id);
expect(response.space_id).toBe(space_id);
expect(response.host).toContain("hf.space");
});
it("handles hugging face domain URLs", async () => {
const app_reference = "https://hmb-hello-world.hf.space/";
const response = await process_endpoint(app_reference);
expect(response.space_id).toBe("hmb-hello-world");
expect(response.host).toBe("hmb-hello-world.hf.space");
});
it("handles huggingface subpath urls", async () => {
const app_reference =
"https://pngwn-pr-demos-test.hf.space/demo/audio_debugger/";
const response = await process_endpoint(app_reference);
expect(response.space_id).toBe("pngwn-pr-demos-test");
expect(response.host).toBe(
"pngwn-pr-demos-test.hf.space/demo/audio_debugger"
);
expect(response.http_protocol).toBe("https:");
});
});
describe("join_urls", () => {
it("joins URLs correctly", () => {
expect(join_urls("http://localhost:7860", "/gradio")).toBe(
"http://localhost:7860/gradio"
);
expect(join_urls("http://localhost:7860/", "/gradio")).toBe(
"http://localhost:7860/gradio"
);
expect(join_urls("http://localhost:7860", "app/", "/gradio")).toBe(
"http://localhost:7860/app/gradio"
);
expect(join_urls("http://localhost:7860/", "/app/", "/gradio/")).toBe(
"http://localhost:7860/app/gradio/"
);
expect(join_urls("http://127.0.0.1:8000/app", "/config")).toBe(
"http://127.0.0.1:8000/app/config"
);
expect(join_urls("http://127.0.0.1:8000/app/gradio", "/config")).toBe(
"http://127.0.0.1:8000/app/gradio/config"
);
});
it("throws an error when the URLs are not valid", () => {
expect(() => join_urls("localhost:7860", "/gradio")).toThrowError(
INVALID_URL_MSG
);
expect(() => join_urls("localhost:7860", "/gradio", "app")).toThrowError(
INVALID_URL_MSG
);
});
});
describe("map_data_params", () => {
let test_data = transformed_api_info;
test_data.named_endpoints["/predict"].parameters = [
{
parameter_name: "param1",
parameter_has_default: false,
label: "",
component: "",
serializer: "",
python_type: {
type: "",
description: ""
},
type: {
type: "",
description: ""
}
},
{
parameter_name: "param2",
parameter_has_default: false,
label: "",
type: {
type: "",
description: ""
},
component: "",
serializer: "",
python_type: {
type: "",
description: ""
}
},
{
parameter_name: "param3",
parameter_has_default: true,
parameter_default: 3,
label: "",
type: {
type: "",
description: ""
},
component: "",
serializer: "",
python_type: {
type: "",
description: ""
}
}
];
let endpoint_info = test_data.named_endpoints["/predict"];
it("should return an array of data when data is an array", () => {
const data = [1, 2];
const result = map_data_to_params(data, endpoint_info);
expect(result).toEqual(data);
});
it("should return an empty array when data is an empty array", () => {
const data = [];
const result = map_data_to_params(data, endpoint_info);
expect(result).toEqual(data);
});
it("should return an empty array when data is not defined", () => {
const data = undefined;
const result = map_data_to_params(data, endpoint_info);
expect(result).toEqual([]);
});
it("should return the data when too many arguments are provided for the endpoint", () => {
const data = [1, 2, 3, 4];
const result = map_data_to_params(data, endpoint_info);
expect(result).toEqual(data);
});
it("should return an array of resolved data when data is an object", () => {
const data = {
param1: 1,
param2: 2,
param3: 3
};
const result = map_data_to_params(data, endpoint_info);
expect(result).toEqual([1, 2, 3]);
});
it("should use the default value when a keyword argument is not provided and has a default value", () => {
const data = {
param1: 1,
param2: 2
};
const result = map_data_to_params(data, endpoint_info);
expect(result).toEqual([1, 2, 3]);
});
it("should throw an error when an invalid keyword argument is provided", () => {
const data = {
param1: 1,
param2: 2,
param3: 3,
param4: 4
};
expect(() => map_data_to_params(data, endpoint_info)).toThrowError(
"Parameter `param4` is not a valid keyword argument. Please refer to the API for usage."
);
});
it("should throw an error when no value is provided for a required parameter", () => {
const data = {};
expect(() => map_data_to_params(data, endpoint_info)).toThrowError(
"No value provided for required parameter: param1"
);
});
});

View File

@ -0,0 +1,28 @@
import { apply_diff } from "../utils/stream";
import { it, expect, describe } from "vitest";
describe("apply_diff", () => {
it("delete_operation_works", () => {
const data = [
{ content: "Hi", role: "user" },
{ content: "How can I assist you?", role: "assistant" }
];
const diff: any = [
["delete", [0], null],
["delete", [0], null]
];
const result = apply_diff(data, diff);
expect(result).toEqual([]);
});
it("delete_operation_works with multiple deletes", () => {
const data = ["a", "b", "c"];
const diff: any = [
["replace", [0], "d"],
["delete", [1], null],
["delete", [1], null]
];
const result = apply_diff(data, diff);
expect(result).toEqual(["d"]);
});
});

View File

@ -0,0 +1,449 @@
import { describe, it, expect, vi, afterEach } from "vitest";
import {
update_object,
walk_and_store_blobs,
skip_queue,
post_message,
handle_file,
handle_payload
} from "../helpers/data";
import { config_response, endpoint_info } from "./test_data";
import { BlobRef, Command } from "../types";
import { FileData } from "../upload";
const IS_NODE = process.env.TEST_MODE === "node";
describe("walk_and_store_blobs", () => {
it("should convert a Buffer to a Blob", async () => {
const buffer = Buffer.from("test data");
const parts = await walk_and_store_blobs(buffer, "text");
expect(parts).toHaveLength(1);
expect(parts[0].blob).toBeInstanceOf(Blob);
});
it("should return a Blob when passed a Blob", async () => {
const blob = new Blob(["test data"]);
const parts = await walk_and_store_blobs(
blob,
undefined,
[],
true,
endpoint_info
);
expect(parts[0].blob).toBeInstanceOf(Blob);
});
it("should handle arrays", async () => {
const image = new Blob([]);
const parts = await walk_and_store_blobs([image]);
expect(parts).toHaveLength(1);
expect(parts[0].blob).toBeInstanceOf(Blob);
expect(parts[0].path).toEqual(["0"]);
});
it("should handle deep structures", async () => {
const image = new Blob([]);
const parts = await walk_and_store_blobs({ a: { b: { data: { image } } } });
expect(parts).toHaveLength(1);
expect(parts[0].blob).toBeInstanceOf(Blob);
expect(parts[0].path).toEqual(["a", "b", "data", "image"]);
});
it("should handle deep structures with arrays", async () => {
const image = new Blob([]);
const parts = await walk_and_store_blobs({
a: [
{
b: [
{
data: [
{
image
}
]
}
]
}
]
});
expect(parts[0].blob).toBeInstanceOf(Blob);
});
it("should handle deep structures with arrays (with equality check)", async () => {
const image = new Blob([]);
const obj = {
a: [
{
b: [
{
data: [[image], image, [image, [image]]]
}
]
}
]
};
const parts = await walk_and_store_blobs(obj);
async function map_path(obj: Record<string, any>, parts: BlobRef[]) {
const { path, blob } = parts[parts.length - 1];
let ref = obj;
path.forEach((p) => (ref = ref[p]));
// since ref is a Blob and blob is a Blob, we deep equal check the two buffers instead
if (ref instanceof Blob && blob instanceof Blob) {
const refBuffer = Buffer.from(await ref.arrayBuffer());
const blobBuffer = Buffer.from(await blob.arrayBuffer());
return refBuffer.equals(blobBuffer);
}
return ref === blob;
}
expect(parts[0].blob).toBeInstanceOf(Blob);
expect(await map_path(obj, parts)).toBeTruthy();
});
it("should handle buffer instances and return a BlobRef", async () => {
const buffer = Buffer.from("test");
const parts = await walk_and_store_blobs(buffer, undefined, ["blob"]);
expect(parts).toHaveLength(1);
expect(parts[0].blob).toBeInstanceOf(Blob);
expect(parts[0].path).toEqual(["blob"]);
});
it("should handle buffer instances with a path and return a BlobRef with the path", async () => {
const buffer = Buffer.from("test data");
const parts = await walk_and_store_blobs(buffer);
expect(parts).toHaveLength(1);
expect(parts[0].path).toEqual([]);
expect(parts[0].blob).toBeInstanceOf(Blob);
});
it("should convert an object with deep structures to BlobRefs", async () => {
const param = {
a: {
b: {
data: {
image: Buffer.from("test image")
}
}
}
};
const parts = await walk_and_store_blobs(param);
expect(parts).toHaveLength(1);
expect(parts[0].path).toEqual(["a", "b", "data", "image"]);
expect(parts[0].blob).toBeInstanceOf(Blob);
});
});
describe("update_object", () => {
it("should update the value of a nested property", () => {
const obj = {
a: {
b: {
c: "old value"
}
}
};
const stack = ["a", "b", "c"];
const new_val = "new value";
update_object(obj, new_val, stack);
expect(obj.a.b.c).toBe(new_val);
});
it("should throw an error for invalid key type", () => {
const obj = {
a: {
b: {
c: "value"
}
}
};
const stack = ["a", "b", true];
const newValue = "new value";
expect(() => {
// @ts-ignore
update_object(obj, newValue, stack);
}).toThrowError("Invalid key type");
});
});
describe("skip_queue", () => {
const id = 0;
const config = config_response;
it("should not skip queue when global and dependency queue is enabled", () => {
config.enable_queue = true;
config.dependencies.find((dep) => dep.id === id)!.queue = true;
const result = skip_queue(id, config_response);
expect(result).toBe(false);
});
it("should not skip queue when global queue is disabled and dependency queue is enabled", () => {
config.enable_queue = false;
config.dependencies.find((dep) => dep.id === id)!.queue = true;
const result = skip_queue(id, config_response);
expect(result).toBe(false);
});
it("should should skip queue when global queue and dependency queue is disabled", () => {
config.enable_queue = false;
config.dependencies.find((dep) => dep.id === id)!.queue = false;
const result = skip_queue(id, config_response);
expect(result).toBe(true);
});
it("should should skip queue when global queue is enabled and dependency queue is disabled", () => {
config.enable_queue = true;
config.dependencies.find((dep) => dep.id === id)!.queue = false;
const result = skip_queue(id, config_response);
expect(result).toBe(true);
});
});
describe("post_message", () => {
afterEach(() => {
vi.restoreAllMocks();
});
it("should send a message to the parent window and resolve with received data", async () => {
const test_data = { key: "value" };
const test_origin = "https://huggingface.co";
const post_message_mock = vi.fn();
global.window = {
// @ts-ignore
parent: {
postMessage: post_message_mock
}
};
const message_channel_mock = {
port1: {
onmessage: (handler) => {
onmessage = handler;
},
close: vi.fn()
},
port2: {}
};
vi.stubGlobal("MessageChannel", function () {
this.port1 = message_channel_mock.port1;
this.port2 = message_channel_mock.port2;
return this;
});
const promise = post_message(test_data, test_origin);
if (message_channel_mock.port1.onmessage) {
message_channel_mock.port1.onmessage({ data: test_data });
}
await expect(promise).resolves.toEqual(test_data);
expect(post_message_mock).toHaveBeenCalledWith(test_data, test_origin, [
message_channel_mock.port2
]);
});
});
describe("handle_file", () => {
it("should handle a Blob object and return the blob", () => {
const blob = new Blob(["test data"], { type: "image/png" });
const result = handle_file(blob) as FileData;
expect(result).toBe(blob);
});
it("should handle a Buffer object and return it as a blob", () => {
const buffer = Buffer.from("test data");
const result = handle_file(buffer) as FileData;
expect(result).toBeInstanceOf(Blob);
});
it("should handle a local file path and return a Command object", () => {
const file_path = "./owl.png";
const result = handle_file(file_path) as Command;
expect(result).toBeInstanceOf(Command);
expect(result).toEqual({
type: "command",
command: "upload_file",
meta: { path: "./owl.png", name: "./owl.png", orig_path: "./owl.png" },
fileData: undefined
});
});
it("should handle a File object and return it as FileData", () => {
if (IS_NODE) {
return;
}
const file = new File(["test image"], "test.png", { type: "image/png" });
const result = handle_file(file) as FileData;
expect(result).toBeInstanceOf(Blob);
});
it("should throw an error for invalid input", () => {
const invalid_input = 123;
expect(() => {
// @ts-ignore
handle_file(invalid_input);
}).toThrowError(
"Invalid input: must be a URL, File, Blob, or Buffer object."
);
});
});
describe("handle_payload", () => {
it("should return an input payload with null in place of `state` when with_null_state is true", () => {
const resolved_payload = [2];
const dependency = {
inputs: [1, 2]
};
const components = [
{ id: 1, type: "number" },
{ id: 2, type: "state" }
];
const with_null_state = true;
const result = handle_payload(
resolved_payload,
// @ts-ignore
dependency,
components,
"input",
with_null_state
);
expect(result).toEqual([2, null]);
});
it("should return an input payload with null in place of two `state` components when with_null_state is true", () => {
const resolved_payload = ["hello", "goodbye"];
const dependency = {
inputs: [1, 2, 3, 4]
};
const components = [
{ id: 1, type: "textbox" },
{ id: 2, type: "state" },
{ id: 3, type: "textbox" },
{ id: 4, type: "state" }
];
const with_null_state = true;
const result = handle_payload(
resolved_payload,
// @ts-ignore
dependency,
components,
"input",
with_null_state
);
expect(result).toEqual(["hello", null, "goodbye", null]);
});
it("should return an output payload without the state component value when with_null_state is false", () => {
const resolved_payload = ["hello", null];
const dependency = {
outputs: [2, 3]
};
const components = [
{ id: 2, type: "textbox" },
{ id: 3, type: "state" }
];
const with_null_state = false;
const result = handle_payload(
resolved_payload,
// @ts-ignore
dependency,
components,
"output",
with_null_state
);
expect(result).toEqual(["hello"]);
});
it("should return an ouput payload without the two state component values when with_null_state is false", () => {
const resolved_payload = ["hello", null, "world", null];
const dependency = {
outputs: [2, 3, 4, 5]
};
const components = [
{ id: 2, type: "textbox" },
{ id: 3, type: "state" },
{ id: 4, type: "textbox" },
{ id: 5, type: "state" }
];
const with_null_state = false;
const result = handle_payload(
resolved_payload,
// @ts-ignore
dependency,
components,
"output",
with_null_state
);
expect(result).toEqual(["hello", "world"]);
});
it("should return an ouput payload with the two state component values when with_null_state is true", () => {
const resolved_payload = ["hello", null, "world", null];
const dependency = {
outputs: [2, 3, 4, 5]
};
const components = [
{ id: 2, type: "textbox" },
{ id: 3, type: "state" },
{ id: 4, type: "textbox" },
{ id: 5, type: "state" }
];
const with_null_state = true;
const result = handle_payload(
resolved_payload,
// @ts-ignore
dependency,
components,
"output",
with_null_state
);
expect(result).toEqual(["hello", null, "world", null]);
});
it("should return the same payload where no state components are defined", () => {
const resolved_payload = ["hello", "world"];
const dependency = {
inputs: [2, 3]
};
const components = [
{ id: 2, type: "textbox" },
{ id: 3, type: "textbox" }
];
const with_null_state = true;
const result = handle_payload(
resolved_payload,
// @ts-ignore
dependency,
components,
"input",
with_null_state
);
expect(result).toEqual(["hello", "world"]);
});
});

View File

@ -0,0 +1,693 @@
import { HttpResponse, http, RequestHandler } from "msw";
import {
HOST_URL,
API_INFO_URL,
CONFIG_URL,
RUNTIME_URL,
SLEEPTIME_URL,
UPLOAD_URL,
BROKEN_CONNECTION_MSG,
LOGIN_URL
} from "../constants";
import {
response_api_info,
config_response,
whoami_response,
duplicate_response,
hardware_sleeptime_response,
discussions_response,
runtime_response
} from "./test_data";
const root_url = "https://huggingface.co";
export const direct_space_url = "https://hmb-hello-world.hf.space";
const private_space_url = "https://hmb-secret-world.hf.space";
const private_auth_space_url = "https://hmb-private-auth-space.hf.space";
const server_error_space_url = "https://hmb-server-error.hf.space";
const upload_server_test_space_url = "https://hmb-server-test.hf.space";
const auth_app_space_url = "https://hmb-auth-space.hf.space";
const unauth_app_space_url = "https://hmb-unauth-space.hf.space";
const invalid_auth_space_url = "https://hmb-invalid-auth-space.hf.space";
const server_error_reference = "hmb/server_error";
const app_reference = "hmb/hello_world";
const broken_app_reference = "hmb/bye_world";
const duplicate_app_reference = "gradio/hello_world";
const private_app_reference = "hmb/secret_world";
const server_test_app_reference = "hmb/server_test";
const auth_app_reference = "hmb/auth_space";
const unauth_app_reference = "hmb/unauth_space";
const invalid_auth_app_reference = "hmb/invalid_auth_space";
const private_auth_app_reference = "hmb/private_auth_space";
export const handlers: RequestHandler[] = [
// /host requests
http.get(`${root_url}/api/spaces/${app_reference}/${HOST_URL}`, () => {
return new HttpResponse(
JSON.stringify({
subdomain: "hmb-hello-world",
host: "https://hmb-hello-world.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${root_url}/api/spaces/${broken_app_reference}/${HOST_URL}`, () => {
return new HttpResponse(null, {
status: 404,
headers: {
"Content-Type": "application/json",
hf_token: "hf_123"
}
});
}),
http.get(
`${root_url}/api/spaces/${private_auth_app_reference}/${HOST_URL}`,
() => {
return new HttpResponse(
JSON.stringify({
subdomain: "hmb-private-auth-space",
host: "https://hmb-private-auth-space.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}
),
http.get(
`${root_url}/api/spaces/${private_app_reference}/${HOST_URL}`,
({ request }) => {
const token = request.headers.get("authorization")?.substring(7);
if (!token || token !== "hf_123") {
return new HttpResponse(null, {
status: 401,
headers: {
"Content-Type": "application/json"
}
});
}
return new HttpResponse(
JSON.stringify({
subdomain: private_app_reference,
host: private_space_url
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}
),
http.get(
`${root_url}/api/spaces/${server_error_reference}/${HOST_URL}`,
() => {
return new HttpResponse(
JSON.stringify({
subdomain: "hmb-server-test",
host: "https://hmb-server-test.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}
),
http.get(
`${root_url}/api/spaces/${server_test_app_reference}/${HOST_URL}`,
() => {
return new HttpResponse(
JSON.stringify({
subdomain: "hmb-server-test",
host: "https://hmb-server-test.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}
),
http.get(`${root_url}/api/spaces/${auth_app_reference}/${HOST_URL}`, () => {
return new HttpResponse(
JSON.stringify({
subdomain: "hmb-auth-space",
host: "https://hmb-auth-space.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(
`${root_url}/api/spaces/${invalid_auth_app_reference}/${HOST_URL}`,
() => {
return new HttpResponse(
JSON.stringify({
subdomain: "hmb-invalid-auth-space",
host: "https://hmb-invalid-auth-space.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}
),
http.get(
`${root_url}/api/spaces/${duplicate_app_reference}/${HOST_URL}`,
() => {
return new HttpResponse(
JSON.stringify({
subdomain: "gradio-hello-world",
host: "https://gradio-hello-world.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}
),
http.get(`${root_url}/api/spaces/${unauth_app_reference}/${HOST_URL}`, () => {
return new HttpResponse(
JSON.stringify({
subdomain: "hmb-unath-space",
host: "https://hmb-unauth-space.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
// /info requests
http.get(`${direct_space_url}/${API_INFO_URL}`, () => {
return new HttpResponse(JSON.stringify(response_api_info), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${upload_server_test_space_url}/${API_INFO_URL}`, () => {
return new HttpResponse(JSON.stringify(response_api_info), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${private_space_url}/${API_INFO_URL}`, () => {
return new HttpResponse(JSON.stringify(response_api_info), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${server_error_space_url}/${API_INFO_URL}`, () => {
return new HttpResponse(JSON.stringify(response_api_info), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${auth_app_space_url}/${API_INFO_URL}`, async () => {
return new HttpResponse(JSON.stringify(response_api_info), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${private_auth_space_url}/${API_INFO_URL}`, async () => {
return new HttpResponse(JSON.stringify(response_api_info), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
// /config requests
http.get(`${direct_space_url}/${CONFIG_URL}`, () => {
return new HttpResponse(JSON.stringify(config_response), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${private_space_url}/${CONFIG_URL}`, () => {
return new HttpResponse(
JSON.stringify({
...config_response,
root: "https://hmb-secret-world.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${upload_server_test_space_url}/${CONFIG_URL}`, () => {
return new HttpResponse(
JSON.stringify({
...config_response,
root: "https://hmb-server-test.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${private_auth_space_url}/${CONFIG_URL}`, () => {
return new HttpResponse(
JSON.stringify({
...config_response,
root: "https://hmb-private-auth-space.hf.space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${direct_space_url}/${CONFIG_URL}`, () => {
return new HttpResponse(JSON.stringify(config_response), {
status: 500,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${server_error_space_url}/${CONFIG_URL}`, () => {
return new HttpResponse(JSON.stringify(config_response), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${invalid_auth_space_url}/${CONFIG_URL}`, () => {
return new HttpResponse(JSON.stringify({ detail: "Unauthorized" }), {
status: 401,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${auth_app_space_url}/${CONFIG_URL}`, ({ request }) => {
return new HttpResponse(
JSON.stringify({
...config_response,
root: "https://hmb-auth-space.hf.space",
space_id: "hmb/auth_space"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${unauth_app_space_url}/${CONFIG_URL}`, () => {
return new HttpResponse(
JSON.stringify({
detail: "Unauthorized"
}),
{
status: 401,
headers: {
"Content-Type": "application/json"
}
}
);
}),
// /whoami requests
http.get(`${root_url}/api/whoami-v2`, () => {
return new HttpResponse(JSON.stringify(whoami_response), {
status: 200,
headers: {
"Content-Type": "application/json",
"hf-token": "hf_123"
}
});
}),
// /duplicate requests
http.post(
`${root_url}/api/spaces/${duplicate_app_reference}/duplicate`,
({ request }) => {
if (request.headers.get("authorization")?.substring(7) !== "hf_123") {
throw new HttpResponse(null, {
status: 401,
headers: {
"Content-Type": "application/json"
}
});
}
return new HttpResponse(JSON.stringify(duplicate_response), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}
),
// /sleeptime requests
http.post(`${root_url}/api/spaces/${app_reference}/${SLEEPTIME_URL}`, () => {
return new HttpResponse(JSON.stringify(hardware_sleeptime_response), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.post(
`${root_url}/api/spaces/${server_test_app_reference}/${SLEEPTIME_URL}`,
() => {
throw new HttpResponse(null, {
status: 500,
headers: {
"Content-Type": "application/json"
}
});
}
),
// /runtime requests
http.get(
`${root_url}/api/spaces/${broken_app_reference}/${RUNTIME_URL}`,
() => {
return new HttpResponse(null, {
status: 404,
headers: {
"Content-Type": "application/json"
}
});
}
),
http.get(`${root_url}/api/spaces/${app_reference}/${RUNTIME_URL}`, () => {
return new HttpResponse(JSON.stringify(hardware_sleeptime_response), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
// queue requests
http.get(`${direct_space_url}/queue/data`, () => {
return new HttpResponse(JSON.stringify({ event_id: "123" }), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.post(`${direct_space_url}/queue/join`, () => {
return new HttpResponse(JSON.stringify({ event_id: "123" }), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
// upload requests
http.post(`${direct_space_url}/${UPLOAD_URL}`, () => {
return new HttpResponse(JSON.stringify(["lion.jpg"]), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.post(`${upload_server_test_space_url}/${UPLOAD_URL}`, () => {
throw new HttpResponse(JSON.stringify("Internal Server Error"), {
status: 500,
headers: {
"Content-Type": "application/json"
}
});
}),
// discussions requests
http.head(`${root_url}/api/spaces/${app_reference}/discussions`, () => {
return new HttpResponse(JSON.stringify(discussions_response), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.head(
`${root_url}/api/spaces/${broken_app_reference}/discussions`,
() => {
throw new HttpResponse(
JSON.parse("Discussions are disabled for this repo"),
{
status: 403,
headers: {
"Content-Type": "application/json"
}
}
);
}
),
// space requests
http.get(`${root_url}/api/spaces/${app_reference}`, () => {
return new HttpResponse(
JSON.stringify({ id: app_reference, runtime: runtime_response }),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${root_url}/api/spaces/hmb/paused_space`, () => {
return new HttpResponse(
JSON.stringify({
id: app_reference,
runtime: { ...runtime_response, stage: "PAUSED" }
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${root_url}/api/spaces/hmb/building_space`, () => {
return new HttpResponse(
JSON.stringify({
id: app_reference,
runtime: { ...runtime_response, stage: "BUILDING" }
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${root_url}/api/spaces/hmb/stopped_space`, () => {
return new HttpResponse(
JSON.stringify({
id: app_reference,
runtime: { ...runtime_response, stage: "STOPPED" }
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${root_url}/api/spaces/hmb/failed_space`, () => {
throw new HttpResponse(null, {
status: 500,
headers: {
"Content-Type": "application/json"
}
});
}),
http.get(`${root_url}/api/spaces/${unauth_app_reference}`, () => {
return new HttpResponse(
JSON.stringify({
id: unauth_app_reference,
runtime: { ...runtime_response }
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
// jwt requests
http.get(`${root_url}/api/spaces/${app_reference}/jwt`, () => {
return new HttpResponse(
JSON.stringify({
token: "jwt_123"
}),
{
status: 200,
headers: {
"Content-Type": "application/json"
}
}
);
}),
http.get(`${root_url}/api/spaces/${broken_app_reference}/jwt`, () => {
return new HttpResponse(null, {
status: 500,
headers: {
"Content-Type": "application/json"
}
});
}),
// post_data requests
http.post(`${direct_space_url}`, () => {
return new HttpResponse(JSON.stringify({}), {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
http.post(`${private_space_url}`, () => {
return new HttpResponse(JSON.stringify(BROKEN_CONNECTION_MSG), {
status: 500,
headers: {
"Content-Type": "application/json"
}
});
}),
// heartbeat requests
http.get(`*/heartbeat/*`, () => {
return new HttpResponse(null, {
status: 200,
headers: {
"Content-Type": "application/json"
}
});
}),
// login requests
http.post(`${auth_app_space_url}/${LOGIN_URL}`, async ({ request }) => {
let username;
let password;
await request.formData().then((data) => {
username = data.get("username");
password = data.get("password");
});
if (username === "admin" && password === "pass1234") {
return new HttpResponse(
JSON.stringify({
success: true
}),
{
status: 200,
headers: {
"Content-Type": "application/json",
"Set-Cookie":
"access-token-123=abc; HttpOnly; Path=/; SameSite=none; Secure",
// @ts-ignore - multiple Set-Cookie headers are returned
"Set-Cookie":
"access-token-unsecure-123=abc; HttpOnly; Path=/; SameSite=none; Secure"
}
}
);
}
return new HttpResponse(null, {
status: 401,
headers: {
"Content-Type": "application/json"
}
});
}),
http.post(`${invalid_auth_space_url}/${LOGIN_URL}`, async () => {
return new HttpResponse(null, {
status: 401,
headers: {
"Content-Type": "application/json"
}
});
}),
http.post(`${private_auth_space_url}/${LOGIN_URL}`, async ({ request }) => {
let username;
let password;
await request.formData().then((data) => {
username = data.get("username");
password = data.get("password");
});
if (username === "admin" && password === "pass1234") {
return new HttpResponse(
JSON.stringify({
success: true
}),
{
status: 200,
headers: {
"Content-Type": "application/json",
"Set-Cookie":
"access-token-123=abc; HttpOnly; Path=/; SameSite=none; Secure",
// @ts-ignore - multiple Set-Cookie headers are returned
"Set-Cookie":
"access-token-unsecure-123=abc; HttpOnly; Path=/; SameSite=none; Secure"
}
}
);
}
return new HttpResponse(null, {
status: 401,
headers: {
"Content-Type": "application/json"
}
});
})
];
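
A usage sketch (not part of the committed file): the defaults above are installed once via initialise_server, and because the test suites call server.resetHandlers() after each case, a single test can temporarily override one route with msw's server.use. CONFIG_URL and direct_space_url below are the same identifiers used in this file.

import { http, HttpResponse } from "msw";
import { CONFIG_URL } from "../constants";
import { initialise_server } from "./server";
import { direct_space_url } from "./handlers";

const server = initialise_server();

// Per-test override: make the config endpoint fail once, then
// afterEach(() => server.resetHandlers()) restores the defaults above.
server.use(
  http.get(`${direct_space_url}/${CONFIG_URL}`, () =>
    HttpResponse.json({ detail: "Service Unavailable" }, { status: 503 })
  )
);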

View File

@ -0,0 +1,162 @@
import {
describe,
beforeAll,
afterEach,
afterAll,
test,
expect,
vi
} from "vitest";
import { Client, client, duplicate } from "..";
import {
transformed_api_info,
config_response,
response_api_info
} from "./test_data";
import { initialise_server } from "./server";
import { SPACE_METADATA_ERROR_MSG } from "../constants";
const app_reference = "hmb/hello_world";
const broken_app_reference = "hmb/bye_world";
const direct_app_reference = "https://hmb-hello-world.hf.space";
const secret_direct_app_reference = "https://hmb-secret-world.hf.space";
const server = initialise_server();
beforeAll(() => server.listen());
afterEach(() => server.resetHandlers());
afterAll(() => server.close());
describe("Client class", () => {
describe("initialisation", () => {
test("fetch is bound to the Client instance", async () => {
const test = await Client.connect("hmb/hello_world");
const fetch_method = test.fetch;
const res = await fetch_method(direct_app_reference + "/info");
await expect(res.json()).resolves.toEqual(response_api_info);
});
test("stream is bound to the Client instance", async () => {
const test = await Client.connect("hmb/hello_world");
const stream_method = test.stream;
const url = new URL(`${direct_app_reference}/queue/data`);
const stream = stream_method(url);
expect(stream).toBeDefined();
expect(stream.onmessage).toBeDefined();
});
test("backwards compatibility of client using deprecated syntax", async () => {
const app = await client(app_reference);
expect(app.config).toEqual(config_response);
});
test("connecting to a running app with a space reference", async () => {
const app = await Client.connect(app_reference);
expect(app.config).toEqual(config_response);
});
test("connecting to a running app with a direct app URL", async () => {
const app = await Client.connect(direct_app_reference);
expect(app.config).toEqual(config_response);
});
test("connecting successfully to a private running app with a space reference", async () => {
const app = await Client.connect("hmb/secret_world", {
hf_token: "hf_123"
});
expect(app.config).toEqual({
...config_response,
root: "https://hmb-secret-world.hf.space"
});
});
test("connecting successfully to a private running app with a direct app URL ", async () => {
const app = await Client.connect(secret_direct_app_reference, {
hf_token: "hf_123"
});
expect(app.config).toEqual({
...config_response,
root: "https://hmb-secret-world.hf.space"
});
});
test("unsuccessfully attempting to connect to a private running app", async () => {
await expect(
Client.connect("hmb/secret_world", {
hf_token: "hf_bad_token"
})
).rejects.toThrowError(SPACE_METADATA_ERROR_MSG);
});
test("viewing the api info of a running app", async () => {
const app = await Client.connect(app_reference);
expect(await app.view_api()).toEqual(transformed_api_info);
});
test("viewing the api info of a non-existent app", async () => {
const app = Client.connect(broken_app_reference);
await expect(app).rejects.toThrowError();
});
});
describe("duplicate", () => {
test("backwards compatibility of duplicate using deprecated syntax", async () => {
const app = await duplicate("gradio/hello_world", {
hf_token: "hf_123",
private: true,
hardware: "cpu-basic"
});
expect(app.config).toEqual(config_response);
});
test("creating a duplicate of a running app", async () => {
const duplicate = await Client.duplicate("gradio/hello_world", {
hf_token: "hf_123",
private: true,
hardware: "cpu-basic"
});
expect(duplicate.config).toEqual(config_response);
});
test("creating a duplicate of a running app without a token", async () => {
const duplicate = Client.duplicate("gradio/hello_world", {
private: true,
hardware: "cpu-basic"
});
await expect(duplicate).rejects.toThrow("Error: Unauthorized");
});
test("creating a duplicate of a broken app", async () => {
const duplicate = Client.duplicate(broken_app_reference);
await expect(duplicate).rejects.toThrow(SPACE_METADATA_ERROR_MSG);
});
});
describe("overriding the Client class", () => {
// TODO: broken test since https://github.com/gradio-app/gradio/pull/10890
test.skip("overriding methods on the Client class", async () => {
const mocked_fetch = vi.fn(
(input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
return Promise.resolve(
new Response(JSON.stringify({ data: "test" }))
);
}
);
class CustomClient extends Client {
fetch = mocked_fetch;
}
await CustomClient.connect("hmb/hello_world");
expect(mocked_fetch).toHaveBeenCalled();
});
});
});

View File

@ -0,0 +1,136 @@
import {
resolve_root,
get_jwt,
determine_protocol,
parse_and_set_cookies
} from "../helpers/init_helpers";
import { initialise_server } from "./server";
import { beforeAll, afterEach, afterAll, it, expect, describe } from "vitest";
import { Client } from "../client";
import { INVALID_CREDENTIALS_MSG, MISSING_CREDENTIALS_MSG } from "../constants";
const server = initialise_server();
beforeAll(() => server.listen());
afterEach(() => server.resetHandlers());
afterAll(() => server.close());
describe("resolve_root", () => {
it('should return the base URL if the root path starts with "http://"', () => {
const base_url = "https://huggingface.co";
const root_path = "https://hmb-hello-world.hf.space";
const prioritize_base = true;
const result = resolve_root(base_url, root_path, prioritize_base);
expect(result).toBe(base_url);
});
it('should return the base URL if the root path starts with "https://"', () => {
const base_url = "https://huggingface.co";
const root_path = "https://hmb-hello-world.hf.space";
const prioritize_base = true;
const result = resolve_root(base_url, root_path, prioritize_base);
expect(result).toBe(base_url);
});
});
describe("get_jwt", () => {
it("should return a valid JWT token when the API call is successful", async () => {
const space = "hmb/hello_world";
const token = "hf_123";
const expected_jwt = "jwt_123";
const result = await get_jwt(space, token);
expect(result).toBe(expected_jwt);
});
it("should return false when the API call fails", async () => {
const space = "hmb/bye_world";
const token = "hf_123";
const result = await get_jwt(space, token);
expect(result).toBe(false);
});
});
describe("determine_protocol", () => {
it('should return the correct protocols and host when the endpoint starts with "http"', () => {
const endpoint = "http://huggingface.co";
const result = determine_protocol(endpoint);
expect(result).toEqual({
ws_protocol: "ws",
http_protocol: "http:",
host: "huggingface.co"
});
});
it('should return the correct protocols and host when the endpoint starts with "https"', () => {
const endpoint = "https://huggingface.co";
const result = determine_protocol(endpoint);
expect(result).toEqual({
ws_protocol: "wss",
http_protocol: "https:",
host: "huggingface.co"
});
});
it('should return the correct protocols and host when the endpoint starts with "file"', () => {
const endpoint = "file:///path/to/app.html";
const result = determine_protocol(endpoint);
expect(result).toEqual({
ws_protocol: "ws",
http_protocol: "http:",
host: "lite.local"
});
});
});
describe("parse_and_set_cookies", () => {
it("should return an empty array when the cookie header is empty", () => {
const cookie_header = "";
const result = parse_and_set_cookies(cookie_header);
expect(result).toEqual([]);
});
it("should parse the cookie header and return an array of cookies", () => {
const cookie_header = "access-token-123=abc;access-token-unsecured-456=def";
const result = parse_and_set_cookies(cookie_header);
expect(result).toEqual(["access-token-123=abc"]);
});
});
describe("resolve_cookies", () => {
it("should set the cookies when correct auth credentials are provided", async () => {
const client = await Client.connect("hmb/auth_space", {
auth: ["admin", "pass1234"]
});
const api = client.view_api();
expect((await api).named_endpoints["/predict"]).toBeDefined();
});
it("should connect to a private and authenticated space", async () => {
const client = await Client.connect("hmb/private_auth_space", {
hf_token: "hf_123",
auth: ["admin", "pass1234"]
});
const api = client.view_api();
expect((await api).named_endpoints["/predict"]).toBeDefined();
});
it("should not set the cookies when auth credentials are invalid", async () => {
await expect(
Client.connect("hmb/invalid_auth_space", {
auth: ["admin", "wrong_password"]
})
).rejects.toThrowError(INVALID_CREDENTIALS_MSG);
});
it("should not set the cookies when auth option is not provided in an auth space", async () => {
await expect(Client.connect("hmb/unauth_space")).rejects.toThrowError(
MISSING_CREDENTIALS_MSG
);
});
});

View File

@ -0,0 +1,13 @@
import { vi } from "vitest";
if (process.env.TEST_MODE !== "node") {
Object.defineProperty(window, "EventSource", {
writable: true,
value: vi.fn().mockImplementation(() => ({
close: vi.fn(() => {}),
addEventListener: vi.fn(),
onmessage: vi.fn((_event: MessageEvent) => {}),
onerror: vi.fn((_event: Event) => {})
}))
});
}

View File

@ -0,0 +1,45 @@
import { Client } from "../client";
import { initialise_server } from "./server";
import { BROKEN_CONNECTION_MSG } from "../constants";
const server = initialise_server();
import { beforeAll, afterEach, afterAll, it, expect, describe } from "vitest";
beforeAll(() => server.listen());
afterEach(() => server.resetHandlers());
afterAll(() => server.close());
describe("post_data", () => {
it("should send a POST request with the correct headers and body", async () => {
const app = await Client.connect("hmb/hello_world");
const config = app.config;
const url = config?.root;
const body = { data: "test" };
if (!url) {
throw new Error("No URL provided");
}
const [response, status] = await app.post_data(url, body);
expect(response).toEqual({});
expect(status).toBe(200);
});
it("should handle network errors", async () => {
const app = await Client.connect("hmb/secret_world", {
hf_token: "hf_123"
});
const url = "https://hmb-secret-world.hf.space";
if (!url) {
throw new Error("No URL provided");
}
const [response, status] = await app.post_data(url, {});
expect(response).toEqual(BROKEN_CONNECTION_MSG);
expect(status).toBe(500);
});
});

View File

@ -0,0 +1,6 @@
import { setupServer } from "msw/node";
import { handlers } from "./handlers";
export function initialise_server(): any {
return setupServer(...handlers);
}

View File

@ -0,0 +1,145 @@
import { SPACE_STATUS_ERROR_MSG } from "../constants";
import {
discussions_enabled,
get_space_hardware,
set_space_timeout,
check_space_status
} from "../helpers/spaces";
import { beforeAll, afterEach, afterAll, it, expect, describe } from "vitest";
import { initialise_server } from "./server";
import { hardware_sleeptime_response } from "./test_data";
import { vi } from "vitest";
const server = initialise_server();
beforeAll(() => server.listen());
afterEach(() => server.resetHandlers());
afterAll(() => server.close());
describe("set_space_timeout", () => {
it("should set the sleep timeout for a space", async () => {
const space_id = "hmb/hello_world";
const timeout = 60;
const hf_token = "hf_123";
const response = await set_space_timeout(space_id, timeout, hf_token);
expect(response).toEqual(hardware_sleeptime_response);
});
it("should throw an error if the fetch call fails", async () => {
const space_id = "hmb/server_test";
const timeout = 60;
const hf_token = "hf_123";
await expect(
set_space_timeout(space_id, timeout, hf_token)
).rejects.toThrow(
"Could not set sleep timeout on duplicated Space. Please visit *ADD HF LINK TO SETTINGS* to set a timeout manually to reduce billing charges."
);
});
});
describe("get_space_hardware", () => {
it("should return the current hardware for a space", async () => {
const space_id = "hmb/hello_world";
const hf_token = "hf_123";
const hardware = await get_space_hardware(space_id, hf_token);
expect(hardware).toEqual(hardware_sleeptime_response.hardware.current);
});
it("should throw an error if the fetch call fails", async () => {
const space_id = "hmb/bye_world";
await expect(get_space_hardware(space_id)).rejects.toThrow(
"Space hardware could not be obtained."
);
});
});
describe("discussions_enabled", () => {
it("should return true if discussions are enabled for the space", async () => {
const space_id = "hmb/hello_world";
const result = await discussions_enabled(space_id);
expect(result).toBe(true);
});
it("should return false if discussions are disabled for the space", async () => {
const space_id = "hmb/bye_world";
const result = await discussions_enabled(space_id);
expect(result).toBe(false);
});
});
describe("check_space_status", () => {
const status_callback = vi.fn();
it("should handle a successful response with RUNNING stage", async () => {
const id = "hmb/hello_world";
const type = "space_name";
await check_space_status(id, type, status_callback);
expect(status_callback).toHaveBeenCalledWith({
status: "running",
load_status: "complete",
message: "Space is running.",
detail: "RUNNING"
});
});
it("should handle a successful response with PAUSED stage", async () => {
const id = "hmb/paused_space";
const type = "space_name";
await check_space_status(id, type, status_callback);
expect(status_callback).toHaveBeenCalledWith({
status: "paused",
load_status: "error",
message:
"This space has been paused by the author. If you would like to try this demo, consider duplicating the space.",
detail: "PAUSED",
discussions_enabled: true
});
});
it("should handle a successful response with BUILDING stage", async () => {
const id = "hmb/building_space";
const type = "space_name";
await check_space_status(id, type, status_callback);
expect(status_callback).toHaveBeenCalledWith({
status: "building",
load_status: "pending",
message: "Space is building...",
detail: "BUILDING"
});
});
it("should handle a successful response with STOPPED stage", async () => {
const id = "hmb/stopped_space";
const type = "space_name";
await check_space_status(id, type, status_callback);
expect(status_callback).toHaveBeenCalledWith({
status: "sleeping",
load_status: "pending",
message: "Space is asleep. Waking it up...",
detail: "STOPPED"
});
});
it("should handle a failed response", async () => {
const id = "hmb/failed_space";
const type = "space_name";
await check_space_status(id, type, status_callback);
expect(status_callback).toHaveBeenCalledWith({
status: "error",
load_status: "error",
message: SPACE_STATUS_ERROR_MSG,
detail: "NOT_FOUND"
});
});
});

View File

@ -0,0 +1,81 @@
import { vi, type Mock } from "vitest";
import { Client } from "../client";
import { readable_stream } from "../utils/stream";
import { initialise_server } from "./server";
import { direct_space_url } from "./handlers.ts";
import {
describe,
it,
expect,
afterEach,
beforeAll,
afterAll,
beforeEach
} from "vitest";
const server = initialise_server();
beforeAll(() => server.listen());
afterEach(() => server.resetHandlers());
afterAll(() => server.close());
describe("open_stream", () => {
let app: Client;
beforeEach(async () => {
app = await Client.connect("hmb/hello_world");
app.stream = vi.fn().mockImplementation(() => {
app.stream_instance = readable_stream(
new URL(`${direct_space_url}/queue/data`)
);
return app.stream_instance;
});
});
afterEach(() => {
vi.clearAllMocks();
});
it("should throw an error if config is not defined", () => {
app.config = undefined;
expect(async () => {
await app.open_stream();
}).rejects.toThrow("Could not resolve app config");
});
it("should connect to the SSE endpoint and handle messages", async () => {
await app.open_stream();
const eventsource_mock_call = (app.stream as Mock).mock.calls[0][0];
expect(eventsource_mock_call.href).toMatch(
/https:\/\/hmb-hello-world\.hf\.space\/queue\/data\?session_hash/
);
expect(app.stream).toHaveBeenCalledWith(eventsource_mock_call);
if (!app.stream_instance?.onmessage || !app.stream_instance?.onerror) {
throw new Error("stream instance is not defined");
}
const onMessageCallback = app.stream_instance.onmessage.bind(app);
const onErrorCallback = app.stream_instance.onerror.bind(app);
const message = { msg: "hello jerry" };
onMessageCallback({ data: JSON.stringify(message) });
expect(app.stream_status.open).toBe(true);
expect(app.event_callbacks).toEqual({});
expect(app.pending_stream_messages).toEqual({});
const close_stream_message = { msg: "close_stream" };
onMessageCallback({ data: JSON.stringify(close_stream_message) });
expect(app.stream_status.open).toBe(false);
onErrorCallback({ data: JSON.stringify("404") });
expect(app.stream_status.open).toBe(false);
});
});

View File

@ -0,0 +1,559 @@
// @ts-nocheck
import { ApiData, ApiInfo, Config, EndpointInfo } from "../types";
export const runtime_response = {
stage: "RUNNING",
hardware: {
current: "cpu-basic",
requested: "cpu-basic"
},
storage: {
current: null,
requested: null
},
gcTimeout: 86400,
replicas: {
current: 1,
requested: 1
},
devMode: false,
domains: [
{
domain: "hmb-hello-world.hf.space",
isCustom: false,
stage: "READY"
}
]
};
export const transformed_api_info: ApiInfo<ApiData> = {
named_endpoints: {
"/predict": {
parameters: [
{
label: "name",
type: "string",
python_type: { type: "str", description: "" },
component: "Textbox",
example_input: "Hello!!"
}
],
returns: [
{
label: "output",
type: "string",
python_type: { type: "str", description: "" },
component: "Textbox"
}
],
type: { generator: false, cancel: false }
}
},
unnamed_endpoints: {
"0": {
parameters: [
{
label: "name",
type: "string",
python_type: { type: "str", description: "" },
component: "Textbox",
example_input: "Hello!!"
}
],
returns: [
{
label: "output",
type: "string",
python_type: { type: "str", description: "" },
component: "Textbox"
}
],
type: { generator: false, cancel: false }
}
}
};
export const response_api_info: ApiInfo<ApiData> = {
named_endpoints: {
"/predict": {
parameters: [
{
label: "name",
type: {
type: "string"
},
python_type: {
type: "str",
description: ""
},
component: "Textbox",
example_input: "Hello!!"
}
],
returns: [
{
label: "output",
type: {
type: "string"
},
python_type: {
type: "str",
description: ""
},
component: "Textbox"
}
]
}
},
unnamed_endpoints: {}
};
export const config_response: Config = {
version: "4.27.0",
mode: "interface",
app_id: 123,
dev_mode: false,
analytics_enabled: true,
components: [
{
id: 3,
type: "row",
props: {
variant: "default",
visible: true,
equal_height: false,
name: "row"
},
skip_api: true,
component_class_id: ""
},
{
id: 4,
type: "column",
props: {
scale: 1,
min_width: 320,
variant: "panel",
visible: true,
name: "column"
},
skip_api: true,
component_class_id: ""
},
{
id: 5,
type: "column",
props: {
scale: 1,
min_width: 320,
variant: "default",
visible: true,
name: "column"
},
skip_api: true,
component_class_id: ""
},
{
id: 1,
type: "textbox",
props: {
lines: 1,
max_lines: 20,
label: "name",
show_label: true,
container: true,
min_width: 160,
visible: true,
autofocus: false,
autoscroll: true,
elem_classes: [],
type: "text",
rtl: false,
show_copy_button: false,
name: "textbox",
_selectable: false
},
skip_api: false,
component_class_id: "",
api_info: {
type: "string"
},
example_inputs: "Hello!!"
},
{
id: 6,
type: "form",
props: {
scale: 0,
min_width: 0,
name: "form"
},
skip_api: true,
component_class_id: ""
},
{
id: 7,
type: "row",
props: {
variant: "default",
visible: true,
equal_height: true,
name: "row"
},
skip_api: true,
component_class_id: ""
},
{
id: 8,
type: "button",
props: {
value: "Clear",
variant: "secondary",
visible: true,
interactive: true,
elem_classes: [],
show_api: false,
name: "button",
_selectable: false
},
skip_api: true,
component_class_id: ""
},
{
id: 9,
type: "button",
props: {
value: "Submit",
variant: "primary",
visible: true,
interactive: true,
elem_classes: [],
name: "button",
_selectable: false
},
skip_api: true,
component_class_id: ""
},
{
id: 10,
type: "column",
props: {
scale: 1,
min_width: 320,
variant: "panel",
visible: true,
name: "column"
},
skip_api: true,
component_class_id: ""
},
{
id: 2,
type: "textbox",
props: {
lines: 1,
max_lines: 20,
label: "output",
show_label: true,
container: true,
min_width: 160,
interactive: false,
visible: true,
autofocus: false,
autoscroll: true,
elem_classes: [],
type: "text",
rtl: false,
show_copy_button: false,
name: "textbox",
_selectable: false
},
skip_api: false,
component_class_id: "",
api_info: {
type: "string"
},
example_inputs: "Hello!!"
},
{
id: 11,
type: "row",
props: {
variant: "default",
visible: true,
equal_height: true,
name: "row"
},
skip_api: true,
component_class_id: ""
},
{
id: 12,
type: "form",
props: {
scale: 0,
min_width: 0,
name: "form"
},
skip_api: true,
component_class_id: ""
}
],
css: null,
js: null,
head: null,
title: "Gradio",
space_id: "hmb/hello_world",
enable_queue: true,
show_error: false,
show_api: true,
is_colab: false,
stylesheets: [],
theme: "default",
protocol: "sse_v3",
body_css: {
body_background_fill: "white",
body_text_color: "#1f2937",
body_background_fill_dark: "#0b0f19",
body_text_color_dark: "#f3f4f6"
},
fill_height: false,
layout: {
id: 0,
children: [
{
id: 3,
children: [
{
id: 4,
children: [
{
id: 5,
children: [
{
id: 6,
children: [
{
id: 1
}
]
}
]
},
{
id: 7,
children: [
{
id: 8
},
{
id: 9
}
]
}
]
},
{
id: 10,
children: [
{
id: 12,
children: [
{
id: 2
}
]
},
{
id: 11,
children: []
}
]
}
]
}
]
},
dependencies: [
{
id: 0,
targets: [
[9, "click"],
[1, "submit"]
],
inputs: [1],
outputs: [2],
backend_fn: true,
js: null,
queue: null,
api_name: "predict",
scroll_to_output: false,
show_progress: "full",
every: null,
batch: false,
max_batch_size: 4,
cancels: [],
types: {
generator: false,
cancel: false
},
collects_event_data: false,
trigger_after: null,
trigger_only_on_success: false,
trigger_mode: "once",
show_api: true,
zerogpu: false
},
{
id: 1,
targets: [[8, "click"]],
inputs: [],
outputs: [1, 2],
backend_fn: false,
js: "() => [null, null]",
queue: false,
api_name: "js_fn",
scroll_to_output: false,
show_progress: "full",
every: null,
batch: false,
max_batch_size: 4,
cancels: [],
types: {
generator: false,
cancel: false
},
collects_event_data: false,
trigger_after: null,
trigger_only_on_success: false,
trigger_mode: "once",
show_api: false,
zerogpu: false
},
{
id: 2,
targets: [[8, "click"]],
inputs: [],
outputs: [5],
backend_fn: false,
js: '() => [{"variant": null, "visible": true, "__type__": "update"}]\n ',
queue: false,
api_name: "js_fn_1",
scroll_to_output: false,
show_progress: "full",
every: null,
batch: false,
max_batch_size: 4,
cancels: [],
types: {
generator: false,
cancel: false
},
collects_event_data: false,
trigger_after: null,
trigger_only_on_success: false,
trigger_mode: "once",
show_api: false,
zerogpu: false
}
],
root: "https://hmb-hello-world.hf.space",
path: ""
};
export const whoami_response = {
type: "user",
id: "123",
name: "hmb",
fullname: "jerry",
email: "jerry@gradio.com",
emailVerified: true,
canPay: true,
periodEnd: 123,
isPro: false,
avatarUrl: "",
orgs: [],
auth: {
type: "access_token",
accessToken: {
displayName: "Gradio Client",
role: "write"
}
}
};
export const duplicate_response = {
url: "https://huggingface.co/spaces/hmb/hello_world"
};
export const hardware_sleeptime_response = {
stage: "RUNNING",
hardware: {
current: "cpu-basic",
requested: "cpu-upgrade"
},
storage: null,
gcTimeout: 300,
replicas: {
current: 1,
requested: 1
},
devMode: false,
domains: [
{
domain: "hmb-hello-world.hf.space",
isCustom: false,
stage: "READY"
}
]
};
export const endpoint_info: EndpointInfo<ApiData> = {
parameters: [
{
label: "parameter_2",
parameter_name: "im",
parameter_has_default: false,
parameter_default: null,
type: "",
python_type: {
type: "Dict(background: filepath | None, layers: List[filepath], composite: filepath | None, id: str | None)",
description: ""
},
component: "Imageeditor",
example_input: {
background: {
path: "",
meta: {
_type: "gradio.FileData"
},
orig_name: "bus.png",
url: ""
},
layers: [],
composite: null
}
}
],
returns: [
{
label: "value_3",
type: "string",
python_type: {
type: "filepath",
description: ""
},
component: "Image"
}
],
type: {
generator: false
}
};
export const discussions_response = {
discussions: [],
count: 0,
start: 0,
numClosedDiscussions: 0
};

View File

@ -0,0 +1,42 @@
import { describe, it, expect, afterEach, beforeAll, afterAll } from "vitest";
import { Client } from "..";
import { initialise_server } from "./server";
const server = initialise_server();
beforeAll(() => server.listen());
afterEach(() => server.resetHandlers());
afterAll(() => server.close());
describe("upload_files", () => {
it("should upload files successfully", async () => {
const root_url = "https://hmb-hello-world.hf.space";
const client = await Client.connect("hmb/hello_world", {
hf_token: "hf_token"
});
const files = [new Blob([], { type: "image/jpeg" })];
const response = await client.upload_files(root_url, files);
if (!response.files) {
throw new Error("No files returned");
}
expect(response.files).toHaveLength(1);
expect(response.files[0]).toBe("lion.jpg");
});
it.skip("should handle a server error when connected to a running app and uploading files", async () => {
const client = await Client.connect("hmb/server_test");
const root_url = "https://hmb-server-test.hf.space";
const files = [new Blob([""], { type: "text/plain" })];
await expect(client.upload_files(root_url, files)).rejects.toThrow(
"Connection errored out. Failed to fetch"
);
});
});

View File

@ -0,0 +1,53 @@
import { describe, beforeAll, afterEach, afterAll, test, expect } from "vitest";
import { Client, client, duplicate } from "..";
import { transformed_api_info, config_response } from "./test_data";
import { initialise_server } from "./server";
const app_reference = "hmb/hello_world";
const secret_app_reference = "hmb/secret_world";
const secret_direct_app_reference = "https://hmb-secret-world.hf.space";
const server = initialise_server();
beforeAll(() => server.listen());
afterEach(() => server.resetHandlers());
afterAll(() => server.close());
describe("view_api", () => {
test("viewing the api of a running, public app", async () => {
const app = await Client.connect(app_reference);
expect(await app.view_api()).toEqual(transformed_api_info);
});
test("viewing the api of a running, private app", async () => {
const app = await Client.connect(secret_app_reference, {
hf_token: "hf_123"
});
expect(app.config).toEqual({
...config_response,
root: secret_direct_app_reference
});
expect(await app.view_api()).toEqual({
...transformed_api_info
});
});
test("viewing the api of a running, private app with a direct app URL", async () => {
const app = await Client.connect(secret_direct_app_reference, {
hf_token: "hf_123"
});
expect(app.config).toEqual({
...config_response,
root: secret_direct_app_reference
});
expect(await app.view_api()).toEqual({
...transformed_api_info
});
});
});

View File

@ -0,0 +1,412 @@
// API Data Types
import { hardware_types } from "./helpers/spaces";
import type { SvelteComponent } from "svelte";
import type { ComponentType } from "svelte";
export interface ApiData {
label: string;
parameter_name: string;
parameter_default?: any;
parameter_has_default?: boolean;
type: {
type: any;
description: string;
};
component: string;
example_input?: any;
python_type: { type: string; description: string };
serializer: string;
}
export interface JsApiData {
label: string;
parameter_name: string;
parameter_default?: any;
parameter_has_default?: boolean;
type: string;
description: string;
component: string;
example_input?: any;
serializer: string;
python_type: { type: string; description: string };
}
export interface EndpointInfo<T extends ApiData | JsApiData> {
parameters: T[];
returns: T[];
type?: DependencyTypes;
}
export interface ApiInfo<T extends ApiData | JsApiData> {
named_endpoints: Record<string, EndpointInfo<T>>;
unnamed_endpoints: Record<string, EndpointInfo<T>>;
}
export interface BlobRef {
path: string[];
type: string | undefined;
blob: Blob | File | false;
}
export type DataType = string | Buffer | Record<string, any> | any[];
// custom class used for uploading local files
export class Command {
type: string;
command: string;
meta: {
path: string;
name: string;
orig_path: string;
};
fileData?: FileData;
constructor(
command: string,
meta: { path: string; name: string; orig_path: string }
) {
this.type = "command";
this.command = command;
this.meta = meta;
}
}
// Function Signature Types
export type SubmitFunction = (
endpoint: string | number,
data?: unknown[] | Record<string, unknown>,
event_data?: unknown,
trigger_id?: number | null
) => SubmitIterable<GradioEvent>;
export type PredictFunction = (
endpoint: string | number,
data?: unknown[] | Record<string, unknown>,
event_data?: unknown
) => Promise<PredictReturn>;
export type client_return = {
config: Config | undefined;
predict: PredictFunction;
submit: SubmitFunction;
component_server: (
component_id: number,
fn_name: string,
data: unknown[]
) => any;
view_api: (_fetch: typeof fetch) => Promise<ApiInfo<JsApiData>>;
};
export interface SubmitIterable<T> extends AsyncIterable<T> {
[Symbol.asyncIterator](): AsyncIterator<T>;
cancel: () => Promise<void>;
event_id: () => string;
}
export type PredictReturn = {
type: EventType;
time: Date;
data: unknown;
endpoint: string;
fn_index: number;
};
// Space Status Types
export type SpaceStatus = SpaceStatusNormal | SpaceStatusError;
export interface SpaceStatusNormal {
status:
| "sleeping"
| "running"
| "building"
| "error"
| "stopped"
| "starting";
detail:
| "SLEEPING"
| "RUNNING"
| "RUNNING_BUILDING"
| "BUILDING"
| "APP_STARTING"
| "NOT_FOUND";
load_status: "pending" | "error" | "complete" | "generating";
message: string;
}
export interface SpaceStatusError {
status: "space_error" | "paused";
detail:
| "NO_APP_FILE"
| "CONFIG_ERROR"
| "BUILD_ERROR"
| "RUNTIME_ERROR"
| "PAUSED";
load_status: "error";
message: string;
discussions_enabled: boolean;
}
export type SpaceStatusCallback = (a: SpaceStatus) => void;
// Configuration and Response Types
// --------------------------------
export interface Config {
deep_link_state?: "none" | "valid" | "invalid";
auth_required?: true;
analytics_enabled: boolean;
connect_heartbeat: boolean;
auth_message: string;
components: ComponentMeta[];
css: string | null;
js: string | null;
head: string | null;
dependencies: Dependency[];
dev_mode: boolean;
enable_queue: boolean;
show_error: boolean;
layout: any;
mode: "blocks" | "interface";
root: string;
root_url?: string;
theme: string;
title: string;
version: string;
space_id: string | null;
is_space: boolean;
is_colab: boolean;
show_api: boolean;
stylesheets: string[];
current_page: string;
page: Record<
string,
{
components: number[];
dependencies: number[];
layout: any;
}
>;
pages: [string, string][];
protocol: "sse_v3" | "sse_v2.1" | "sse_v2" | "sse_v1" | "sse" | "ws";
max_file_size?: number;
theme_hash?: number;
username: string | null;
api_prefix?: string;
fill_height?: boolean;
fill_width?: boolean;
pwa?: boolean;
i18n_translations?: Record<string, Record<string, string>> | null;
mcp_server?: boolean;
}
// todo: DRY up types
export interface ComponentMeta {
type: string;
id: number;
has_modes: boolean;
props: SharedProps;
instance: SvelteComponent;
component: ComponentType<SvelteComponent>;
documentation?: Documentation;
children?: ComponentMeta[];
parent?: ComponentMeta;
value?: any;
component_class_id: string;
key: string | number | null;
rendered_in?: number;
}
interface SharedProps {
elem_id?: string;
elem_classes?: string[];
components?: string[];
server_fns?: string[];
interactive: boolean;
[key: string]: unknown;
root_url?: string;
}
export interface Documentation {
type?: TypeDescription;
description?: TypeDescription;
example_data?: string;
}
interface TypeDescription {
input_payload?: string;
response_object?: string;
payload?: string;
}
export interface Dependency {
id: number;
targets: [number, string][];
inputs: number[];
outputs: number[];
backend_fn: boolean;
js: string | null;
scroll_to_output: boolean;
trigger: "click" | "load" | string;
max_batch_size: number;
show_progress: "full" | "minimal" | "hidden";
show_progress_on: number[] | null;
frontend_fn: ((...args: unknown[]) => Promise<unknown[]>) | null;
status?: string;
queue: boolean | null;
every: number | null;
batch: boolean;
api_name: string | null;
cancels: number[];
types: DependencyTypes;
collects_event_data: boolean;
pending_request?: boolean;
trigger_after?: number;
trigger_only_on_success?: boolean;
trigger_mode: "once" | "multiple" | "always_last";
final_event: Payload | null;
show_api: boolean;
rendered_in: number | null;
render_id: number | null;
connection: "stream" | "sse";
time_limit: number;
stream_every: number;
like_user_message: boolean;
event_specific_args: string[];
js_implementation: string | null;
}
export interface DependencyTypes {
generator: boolean;
cancel: boolean;
}
export interface Payload {
fn_index: number;
data: unknown[];
time?: Date;
event_data?: unknown;
trigger_id?: number | null;
}
export interface PostResponse {
error?: string;
[x: string]: any;
}
export interface UploadResponse {
error?: string;
files?: string[];
}
// Client and File Handling Types
export interface DuplicateOptions extends ClientOptions {
private?: boolean;
hardware?: (typeof hardware_types)[number];
timeout?: number;
}
export interface ClientOptions {
hf_token?: `hf_${string}`;
status_callback?: SpaceStatusCallback | null;
auth?: [string, string] | null;
with_null_state?: boolean;
events?: EventType[];
headers?: Record<string, string>;
query_params?: Record<string, string>;
session_hash?: string;
}
export interface FileData {
name: string;
orig_name?: string;
size?: number;
data: string;
blob?: File;
is_file?: boolean;
mime_type?: string;
alt_text?: string;
}
// Event and Listener Types
export type EventType = "data" | "status" | "log" | "render";
export interface EventMap {
data: PayloadMessage;
status: StatusMessage;
log: LogMessage;
render: RenderMessage;
}
export type GradioEvent = {
[P in EventType]: EventMap[P];
}[EventType];
export interface Log {
log: string;
title: string;
level: "warning" | "info" | "success";
}
export interface Render {
data: {
components: any[];
layout: any;
dependencies: Dependency[];
render_id: number;
};
}
export interface Status {
queue: boolean;
code?: string;
success?: boolean;
stage: "pending" | "error" | "complete" | "generating" | "streaming";
duration?: number;
visible?: boolean;
broken?: boolean;
size?: number;
position?: number;
eta?: number;
title?: string;
message?: string;
progress_data?: {
progress: number | null;
index: number | null;
length: number | null;
unit: string | null;
desc: string | null;
}[];
time?: Date;
changed_state_ids?: number[];
time_limit?: number;
}
export interface StatusMessage extends Status {
type: "status";
endpoint: string;
fn_index: number;
original_msg?: string;
}
export interface PayloadMessage extends Payload {
type: "data";
endpoint: string;
fn_index: number;
}
export interface LogMessage extends Log {
type: "log";
endpoint: string;
fn_index: number;
duration: number | null;
visible: boolean;
}
export interface RenderMessage extends Render {
type: "render";
endpoint: string;
fn_index: number;
}
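
For illustration only (not part of this file), a minimal sketch of how the option types above compose; all values are placeholders.

import type { ClientOptions, DuplicateOptions } from "./types";

// Placeholder values; hf_token must match the `hf_${string}` template literal type.
const options: ClientOptions = {
  hf_token: "hf_123",
  auth: ["admin", "pass1234"],
  events: ["data", "status"],
  with_null_state: true
};

const duplicate_options: DuplicateOptions = {
  ...options,
  private: true,
  hardware: "cpu-basic",
  timeout: 300
};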

View File

@ -0,0 +1,109 @@
import type { Client } from "./client";
export async function upload(
this: Client,
file_data: FileData[],
root_url: string,
upload_id?: string,
max_file_size?: number
): Promise<(FileData | null)[] | null> {
let files = (Array.isArray(file_data) ? file_data : [file_data]).map(
(file_data) => file_data.blob!
);
const oversized_files = files.filter(
(f) => f.size > (max_file_size ?? Infinity)
);
if (oversized_files.length) {
throw new Error(
`File size exceeds the maximum allowed size of ${max_file_size} bytes: ${oversized_files
.map((f) => f.name)
.join(", ")}`
);
}
return await Promise.all(
await this.upload_files(root_url, files, upload_id).then(
async (response: { files?: string[]; error?: string }) => {
if (response.error) {
throw new Error(response.error);
} else {
if (response.files) {
return response.files.map((f, i) => {
const file = new FileData({
...file_data[i],
path: f,
url: `${root_url}${this.api_prefix}/file=${f}`
});
return file;
});
}
return [];
}
}
)
);
}
export async function prepare_files(
files: File[],
is_stream?: boolean
): Promise<FileData[]> {
return files.map(
(f) =>
new FileData({
path: f.name,
orig_name: f.name,
blob: f,
size: f.size,
mime_type: f.type,
is_stream
})
);
}
export class FileData {
path: string;
url?: string;
orig_name?: string;
size?: number;
blob?: File;
is_stream?: boolean;
mime_type?: string;
alt_text?: string;
b64?: string;
readonly meta = { _type: "gradio.FileData" };
constructor({
path,
url,
orig_name,
size,
blob,
is_stream,
mime_type,
alt_text,
b64
}: {
path: string;
url?: string;
orig_name?: string;
size?: number;
blob?: File;
is_stream?: boolean;
mime_type?: string;
alt_text?: string;
b64?: string;
}) {
this.path = path;
this.url = url;
this.orig_name = orig_name;
this.size = size;
this.blob = url ? undefined : blob;
this.is_stream = is_stream;
this.mime_type = mime_type;
this.alt_text = alt_text;
this.b64 = b64;
}
}
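
A usage sketch (illustrative, not part of the committed file), assuming the bound form of upload() is exposed on a connected client as app.upload:

import { Client } from "./client";
import { prepare_files } from "./upload";

async function upload_example(): Promise<void> {
  // Assumes upload() above is bound to the Client instance as app.upload.
  const app = await Client.connect("hmb/hello_world");
  const files = [new File(["hello"], "hello.txt", { type: "text/plain" })];
  const file_data = await prepare_files(files);
  // Each returned FileData gets `path` set to the uploaded name and
  // `url` set to `${root}${api_prefix}/file=...`.
  const uploaded = await app.upload(file_data, app.config!.root);
  console.log(uploaded);
}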

View File

@ -0,0 +1,129 @@
import {
get_space_hardware,
hardware_types,
set_space_timeout
} from "../helpers/spaces";
import type { DuplicateOptions } from "../types";
import { Client } from "../client";
import { SPACE_METADATA_ERROR_MSG } from "../constants";
import {
get_cookie_header,
parse_and_set_cookies
} from "../helpers/init_helpers";
import { process_endpoint } from "../helpers/api_info";
export async function duplicate(
app_reference: string,
options: DuplicateOptions
): Promise<Client> {
const { hf_token, private: _private, hardware, timeout, auth } = options;
if (hardware && !hardware_types.includes(hardware)) {
throw new Error(
`Invalid hardware type provided. Valid types are: ${hardware_types
.map((v) => `"${v}"`)
.join(",")}.`
);
}
const { http_protocol, host } = await process_endpoint(
app_reference,
hf_token
);
let cookies: string[] | null = null;
if (auth) {
const cookie_header = await get_cookie_header(
http_protocol,
host,
auth,
fetch
);
if (cookie_header) cookies = parse_and_set_cookies(cookie_header);
}
const headers = {
Authorization: `Bearer ${hf_token}`,
"Content-Type": "application/json",
...(cookies ? { Cookie: cookies.join("; ") } : {})
};
const user = (
await (
await fetch(`https://huggingface.co/api/whoami-v2`, {
headers
})
).json()
).name;
const space_name = app_reference.split("/")[1];
const body: {
repository: string;
private?: boolean;
hardware?: string;
} = {
repository: `${user}/${space_name}`
};
if (_private) {
body.private = true;
}
let original_hardware;
try {
if (!hardware) {
original_hardware = await get_space_hardware(app_reference, hf_token);
}
} catch (e) {
throw Error(SPACE_METADATA_ERROR_MSG + (e as Error).message);
}
const requested_hardware = hardware || original_hardware || "cpu-basic";
body.hardware = requested_hardware;
try {
const response = await fetch(
`https://huggingface.co/api/spaces/${app_reference}/duplicate`,
{
method: "POST",
headers,
body: JSON.stringify(body)
}
);
if (response.status === 409) {
try {
const client = await Client.connect(`${user}/${space_name}`, options);
return client;
} catch (error) {
console.error("Failed to connect Client instance:", error);
throw error;
}
} else if (response.status !== 200) {
throw new Error(response.statusText);
}
const duplicated_space = await response.json();
await set_space_timeout(`${user}/${space_name}`, timeout || 300, hf_token);
return await Client.connect(
get_space_reference(duplicated_space.url),
options
);
} catch (e: any) {
throw new Error(e);
}
}
function get_space_reference(url: string): any {
const regex = /https:\/\/huggingface.co\/spaces\/([^/]+\/[^/]+)/;
const match = url.match(regex);
if (match) {
return match[1];
}
}

View File

@ -0,0 +1,140 @@
import { update_object, walk_and_store_blobs } from "../helpers/data";
import {
Command,
type ApiData,
type EndpointInfo,
type JsApiData
} from "../types";
import { FileData } from "../upload";
import type { Client } from "..";
import {
FILE_PROCESSING_ERROR_MSG,
NODEJS_FS_ERROR_MSG,
ROOT_URL_ERROR_MSG
} from "../constants";
export async function handle_blob(
this: Client,
endpoint: string,
data: unknown[],
api_info: EndpointInfo<JsApiData | ApiData>
): Promise<unknown[]> {
const self = this;
await process_local_file_commands(self, data);
const blobRefs = await walk_and_store_blobs(
data,
undefined,
[],
true,
api_info
);
const results = await Promise.all(
blobRefs.map(async ({ path, blob, type }) => {
if (!blob) return { path, type };
const response = await self.upload_files(endpoint, [blob]);
const file_url = response.files && response.files[0];
return {
path,
file_url,
type,
name:
typeof File !== "undefined" && blob instanceof File
? blob?.name
: undefined
};
})
);
results.forEach(({ path, file_url, type, name }) => {
if (type === "Gallery") {
update_object(data, file_url, path);
} else if (file_url) {
const file = new FileData({ path: file_url, orig_name: name });
update_object(data, file, path);
}
});
return data;
}
export async function process_local_file_commands(
client: Client,
data: unknown[]
): Promise<void> {
const root = client.config?.root || client.config?.root_url;
if (!root) {
throw new Error(ROOT_URL_ERROR_MSG);
}
await recursively_process_commands(client, data);
}
async function recursively_process_commands(
client: Client,
data: any,
path: string[] = []
): Promise<void> {
for (const key in data) {
if (data[key] instanceof Command) {
await process_single_command(client, data, key);
} else if (typeof data[key] === "object" && data[key] !== null) {
await recursively_process_commands(client, data[key], [...path, key]);
}
}
}
async function process_single_command(
client: Client,
data: any,
key: string
): Promise<void> {
let cmd_item = data[key] as Command;
const root = client.config?.root || client.config?.root_url;
if (!root) {
throw new Error(ROOT_URL_ERROR_MSG);
}
try {
let fileBuffer: Buffer;
let fullPath: string;
// check if running in a Node.js environment
if (
typeof process !== "undefined" &&
process.versions &&
process.versions.node
) {
const fs = await import("fs/promises");
const path = await import("path");
fullPath = path.resolve(process.cwd(), cmd_item.meta.path);
fileBuffer = await fs.readFile(fullPath); // Read file from disk
} else {
throw new Error(NODEJS_FS_ERROR_MSG);
}
const file = new Blob([fileBuffer], { type: "application/octet-stream" });
const response = await client.upload_files(root, [file]);
const file_url = response.files && response.files[0];
if (file_url) {
const fileData = new FileData({
path: file_url,
orig_name: cmd_item.meta.name || ""
});
// replace the command object with the fileData object
data[key] = fileData;
}
} catch (error) {
console.error(FILE_PROCESSING_ERROR_MSG, error);
}
}
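
An illustrative sketch of the Command path handled above (Node.js only, since the file is read with fs/promises); the command name and local path are placeholders:

import { Command } from "../types";

// A payload entry shaped like this is replaced in place with a FileData
// object after the file is read from disk and uploaded via upload_files.
const payload: unknown[] = [
  new Command("upload_file", {
    // placeholder command name and local path
    path: "./lion.jpg",
    name: "lion.jpg",
    orig_path: "./lion.jpg"
  })
];
// Bound call, assuming `client` is a connected Client and `api_info`
// describes the target endpoint:
// await handle_blob.call(client, client.config!.root, payload, api_info);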

View File

@ -0,0 +1,38 @@
import { BROKEN_CONNECTION_MSG } from "../constants";
import type { PostResponse } from "../types";
import { Client } from "..";
export async function post_data(
this: Client,
url: string,
body: unknown,
additional_headers?: any
): Promise<[PostResponse, number]> {
const headers: {
Authorization?: string;
"Content-Type": "application/json";
} = { "Content-Type": "application/json" };
if (this.options.hf_token) {
headers.Authorization = `Bearer ${this.options.hf_token}`;
}
try {
var response = await this.fetch(url, {
method: "POST",
body: JSON.stringify(body),
headers: { ...headers, ...additional_headers },
credentials: "include"
});
} catch (e) {
return [{ error: BROKEN_CONNECTION_MSG }, 500];
}
let output: PostResponse;
let status: number;
try {
output = await response.json();
status = response.status;
} catch (e) {
output = { error: `Could not parse server response: ${e}` };
status = 500;
}
return [output, status];
}

View File

@ -0,0 +1,51 @@
import { Client } from "../client";
import type { Dependency, PredictReturn } from "../types";
export async function predict(
this: Client,
endpoint: string | number,
data: unknown[] | Record<string, unknown> = {}
): Promise<PredictReturn> {
let data_returned = false;
let status_complete = false;
let dependency: Dependency;
if (!this.config) {
throw new Error("Could not resolve app config");
}
if (typeof endpoint === "number") {
dependency = this.config.dependencies.find((dep) => dep.id == endpoint)!;
} else {
const trimmed_endpoint = endpoint.replace(/^\//, "");
dependency = this.config.dependencies.find(
(dep) => dep.id == this.api_map[trimmed_endpoint]
)!;
}
return new Promise(async (resolve, reject) => {
const app = this.submit(endpoint, data, null, null, true);
let result: unknown;
for await (const message of app) {
if (message.type === "data") {
if (status_complete) {
resolve(result as PredictReturn);
}
data_returned = true;
result = message;
}
if (message.type === "status") {
if (message.stage === "error") reject(message);
if (message.stage === "complete") {
status_complete = true;
// if complete message comes after data, resolve here
if (data_returned) {
resolve(result as PredictReturn);
}
}
}
}
});
}
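
A minimal usage sketch (not part of the committed file), assuming the bound method is exposed as app.predict and targeting the /predict endpoint defined in the test config:

import { Client } from "../client";

async function predict_example(): Promise<void> {
  const app = await Client.connect("hmb/hello_world");
  // Positional data for the single Textbox input of /predict.
  const result = await app.predict("/predict", ["Hello!!"]);
  // result is a PredictReturn; result.data holds the endpoint outputs.
  console.log(result.data);
}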

View File

@ -0,0 +1,228 @@
import { BROKEN_CONNECTION_MSG, SSE_URL } from "../constants";
import type { Client } from "../client";
import { stream } from "fetch-event-stream";
export async function open_stream(this: Client): Promise<void> {
let {
event_callbacks,
unclosed_events,
pending_stream_messages,
stream_status,
config,
jwt
} = this;
const that = this;
if (!config) {
throw new Error("Could not resolve app config");
}
stream_status.open = true;
let stream: EventSource | null = null;
let params = new URLSearchParams({
session_hash: this.session_hash
}).toString();
let url = new URL(`${config.root}${this.api_prefix}/${SSE_URL}?${params}`);
if (jwt) {
url.searchParams.set("__sign", jwt);
}
stream = this.stream(url);
if (!stream) {
console.warn("Cannot connect to SSE endpoint: " + url.toString());
return;
}
stream.onmessage = async function (event: MessageEvent) {
let _data = JSON.parse(event.data);
if (_data.msg === "close_stream") {
close_stream(stream_status, that.abort_controller);
return;
}
const event_id = _data.event_id;
if (!event_id) {
await Promise.all(
Object.keys(event_callbacks).map((event_id) =>
event_callbacks[event_id](_data)
)
);
} else if (event_callbacks[event_id] && config) {
if (
_data.msg === "process_completed" &&
["sse", "sse_v1", "sse_v2", "sse_v2.1", "sse_v3"].includes(
config.protocol
)
) {
unclosed_events.delete(event_id);
}
let fn: (data: any) => void = event_callbacks[event_id];
if (typeof window !== "undefined" && typeof document !== "undefined") {
				setTimeout(fn, 0, _data); // push the callback to the end of the event loop so the browser can refresh between callbacks and not freeze during quick generations. See https://github.com/gradio-app/gradio/pull/7055
} else {
fn(_data);
}
} else {
if (!pending_stream_messages[event_id]) {
pending_stream_messages[event_id] = [];
}
pending_stream_messages[event_id].push(_data);
}
};
stream.onerror = async function () {
await Promise.all(
Object.keys(event_callbacks).map((event_id) =>
event_callbacks[event_id]({
msg: "unexpected_error",
message: BROKEN_CONNECTION_MSG
})
)
);
};
}
export function close_stream(
stream_status: { open: boolean },
abort_controller: AbortController | null
): void {
if (stream_status) {
stream_status.open = false;
abort_controller?.abort();
}
}
export function apply_diff_stream(
pending_diff_streams: Record<string, any[][]>,
event_id: string,
data: any
): void {
let is_first_generation = !pending_diff_streams[event_id];
if (is_first_generation) {
pending_diff_streams[event_id] = [];
data.data.forEach((value: any, i: number) => {
pending_diff_streams[event_id][i] = value;
});
} else {
data.data.forEach((value: any, i: number) => {
let new_data = apply_diff(pending_diff_streams[event_id][i], value);
pending_diff_streams[event_id][i] = new_data;
data.data[i] = new_data;
});
}
}
export function apply_diff(
obj: any,
diff: [string, (number | string)[], any][]
): any {
diff.forEach(([action, path, value]) => {
obj = apply_edit(obj, path, action, value);
});
return obj;
}
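// Applies a single diff edit at `path` inside a nested object/array, mutating it
// in place: "replace" and "append" update a value, "add" inserts one, "delete" removes one.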
function apply_edit(
target: any,
path: (number | string)[],
action: string,
value: any
): any {
if (path.length === 0) {
if (action === "replace") {
return value;
} else if (action === "append") {
return target + value;
}
throw new Error(`Unsupported action: ${action}`);
}
let current = target;
for (let i = 0; i < path.length - 1; i++) {
current = current[path[i]];
}
const last_path = path[path.length - 1];
switch (action) {
case "replace":
current[last_path] = value;
break;
case "append":
current[last_path] += value;
break;
case "add":
if (Array.isArray(current)) {
current.splice(Number(last_path), 0, value);
} else {
current[last_path] = value;
}
break;
case "delete":
if (Array.isArray(current)) {
current.splice(Number(last_path), 1);
} else {
delete current[last_path];
}
break;
default:
throw new Error(`Unknown action: ${action}`);
}
return target;
}
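// An EventSource-compatible shim backed by `fetch-event-stream`: only onmessage,
// onerror and readyState are wired up; the remaining EventSource methods are stubs.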
export function readable_stream(
input: RequestInfo | URL,
init: RequestInit = {}
): EventSource {
const instance: EventSource & { readyState: number } = {
close: () => {
console.warn("Method not implemented.");
},
onerror: null,
onmessage: null,
onopen: null,
readyState: 0,
url: input.toString(),
withCredentials: false,
CONNECTING: 0,
OPEN: 1,
CLOSED: 2,
addEventListener: () => {
throw new Error("Method not implemented.");
},
dispatchEvent: () => {
throw new Error("Method not implemented.");
},
removeEventListener: () => {
throw new Error("Method not implemented.");
}
};
stream(input, init)
.then(async (res) => {
instance.readyState = instance.OPEN;
try {
for await (const chunk of res) {
//@ts-ignore
instance.onmessage && instance.onmessage(chunk);
}
instance.readyState = instance.CLOSED;
} catch (e) {
instance.onerror && instance.onerror(e as Event);
instance.readyState = instance.CLOSED;
}
})
.catch((e) => {
console.error(e);
instance.onerror && instance.onerror(e as Event);
instance.readyState = instance.CLOSED;
});
return instance as EventSource;
}

View File

@ -0,0 +1,862 @@
/* eslint-disable complexity */
import type {
Status,
Payload,
GradioEvent,
JsApiData,
EndpointInfo,
ApiInfo,
Config,
Dependency,
SubmitIterable
} from "../types";
import { skip_queue, post_message, handle_payload } from "../helpers/data";
import { resolve_root } from "../helpers/init_helpers";
import {
handle_message,
map_data_to_params,
process_endpoint
} from "../helpers/api_info";
import semiver from "semiver";
import {
BROKEN_CONNECTION_MSG,
QUEUE_FULL_MSG,
SSE_URL,
SSE_DATA_URL,
RESET_URL,
CANCEL_URL
} from "../constants";
import { apply_diff_stream, close_stream } from "./stream";
import { Client } from "../client";
export function submit(
this: Client,
endpoint: string | number,
data: unknown[] | Record<string, unknown> = {},
event_data?: unknown,
trigger_id?: number | null,
all_events?: boolean
): SubmitIterable<GradioEvent> {
try {
const { hf_token } = this.options;
const {
fetch,
app_reference,
config,
session_hash,
api_info,
api_map,
stream_status,
pending_stream_messages,
pending_diff_streams,
event_callbacks,
unclosed_events,
post_data,
options,
api_prefix
} = this;
const that = this;
if (!api_info) throw new Error("No API found");
if (!config) throw new Error("Could not resolve app config");
let { fn_index, endpoint_info, dependency } = get_endpoint_info(
api_info,
endpoint,
api_map,
config
);
let resolved_data = map_data_to_params(data, endpoint_info);
let websocket: WebSocket;
let stream: EventSource | null;
let protocol = config.protocol ?? "ws";
let event_id_final = "";
let event_id_cb: () => string = () => event_id_final;
const _endpoint = typeof endpoint === "number" ? "/predict" : endpoint;
let payload: Payload;
let event_id: string | null = null;
let complete: Status | undefined | false = false;
let last_status: Record<string, Status["stage"]> = {};
let url_params =
typeof window !== "undefined" && typeof document !== "undefined"
? new URLSearchParams(window.location.search).toString()
: "";
const events_to_publish =
options?.events?.reduce(
(acc, event) => {
acc[event] = true;
return acc;
},
{} as Record<string, boolean>
) || {};
// event subscription methods
function fire_event(event: GradioEvent): void {
if (all_events || events_to_publish[event.type]) {
push_event(event);
}
}
async function cancel(): Promise<void> {
const _status: Status = {
stage: "complete",
queue: false,
time: new Date()
};
complete = _status;
fire_event({
..._status,
type: "status",
endpoint: _endpoint,
fn_index: fn_index
});
let reset_request = {};
let cancel_request = {};
if (protocol === "ws") {
if (websocket && websocket.readyState === 0) {
websocket.addEventListener("open", () => {
websocket.close();
});
} else {
websocket.close();
}
reset_request = { fn_index, session_hash };
} else {
reset_request = { event_id };
cancel_request = { event_id, session_hash, fn_index };
}
try {
if (!config) {
throw new Error("Could not resolve app config");
}
if ("event_id" in cancel_request) {
await fetch(`${config.root}${api_prefix}/${CANCEL_URL}`, {
headers: { "Content-Type": "application/json" },
method: "POST",
body: JSON.stringify(cancel_request)
});
}
await fetch(`${config.root}${api_prefix}/${RESET_URL}`, {
headers: { "Content-Type": "application/json" },
method: "POST",
body: JSON.stringify(reset_request)
});
} catch (e) {
console.warn(
"The `/reset` endpoint could not be called. Subsequent endpoint results may be unreliable."
);
}
}
const resolve_heartbeat = async (config: Config): Promise<void> => {
await this._resolve_hearbeat(config);
};
async function handle_render_config(render_config: any): Promise<void> {
if (!config) return;
let render_id: number = render_config.render_id;
config.components = [
...config.components.filter((c) => c.props.rendered_in !== render_id),
...render_config.components
];
config.dependencies = [
...config.dependencies.filter((d) => d.rendered_in !== render_id),
...render_config.dependencies
];
const any_state = config.components.some((c) => c.type === "state");
const any_unload = config.dependencies.some((d) =>
d.targets.some((t) => t[1] === "unload")
);
config.connect_heartbeat = any_state || any_unload;
await resolve_heartbeat(config);
fire_event({
type: "render",
data: render_config,
endpoint: _endpoint,
fn_index
});
}
this.handle_blob(config.root, resolved_data, endpoint_info).then(
async (_payload) => {
let input_data = handle_payload(
_payload,
dependency,
config.components,
"input",
true
);
payload = {
data: input_data || [],
event_data,
fn_index,
trigger_id
};
if (skip_queue(fn_index, config)) {
fire_event({
type: "status",
endpoint: _endpoint,
stage: "pending",
queue: false,
fn_index,
time: new Date()
});
post_data(
`${config.root}${api_prefix}/run${
_endpoint.startsWith("/") ? _endpoint : `/${_endpoint}`
}${url_params ? "?" + url_params : ""}`,
{
...payload,
session_hash
}
)
.then(([output, status_code]: any) => {
const data = output.data;
if (status_code == 200) {
fire_event({
type: "data",
endpoint: _endpoint,
fn_index,
data: handle_payload(
data,
dependency,
config.components,
"output",
options.with_null_state
),
time: new Date(),
event_data,
trigger_id
});
if (output.render_config) {
handle_render_config(output.render_config);
}
fire_event({
type: "status",
endpoint: _endpoint,
fn_index,
stage: "complete",
eta: output.average_duration,
queue: false,
time: new Date()
});
} else {
fire_event({
type: "status",
stage: "error",
endpoint: _endpoint,
fn_index,
message: output.error,
queue: false,
time: new Date()
});
}
})
.catch((e) => {
fire_event({
type: "status",
stage: "error",
message: e.message,
endpoint: _endpoint,
fn_index,
queue: false,
time: new Date()
});
});
} else if (protocol == "ws") {
const { ws_protocol, host } = await process_endpoint(
app_reference,
hf_token
);
fire_event({
type: "status",
stage: "pending",
queue: true,
endpoint: _endpoint,
fn_index,
time: new Date()
});
let url = new URL(
`${ws_protocol}://${resolve_root(
host,
config.root as string,
true
)}/queue/join${url_params ? "?" + url_params : ""}`
);
if (this.jwt) {
url.searchParams.set("__sign", this.jwt);
}
websocket = new WebSocket(url);
websocket.onclose = (evt) => {
if (!evt.wasClean) {
fire_event({
type: "status",
stage: "error",
broken: true,
message: BROKEN_CONNECTION_MSG,
queue: true,
endpoint: _endpoint,
fn_index,
time: new Date()
});
}
};
websocket.onmessage = function (event) {
const _data = JSON.parse(event.data);
const { type, status, data } = handle_message(
_data,
last_status[fn_index]
);
if (type === "update" && status && !complete) {
// call 'status' listeners
fire_event({
type: "status",
endpoint: _endpoint,
fn_index,
time: new Date(),
...status
});
if (status.stage === "error") {
websocket.close();
}
} else if (type === "hash") {
websocket.send(JSON.stringify({ fn_index, session_hash }));
return;
} else if (type === "data") {
websocket.send(JSON.stringify({ ...payload, session_hash }));
} else if (type === "complete") {
complete = status;
} else if (type === "log") {
fire_event({
type: "log",
title: data.title,
log: data.log,
level: data.level,
endpoint: _endpoint,
duration: data.duration,
visible: data.visible,
fn_index
});
} else if (type === "generating") {
fire_event({
type: "status",
time: new Date(),
...status,
stage: status?.stage!,
queue: true,
endpoint: _endpoint,
fn_index
});
}
if (data) {
fire_event({
type: "data",
time: new Date(),
data: handle_payload(
data.data,
dependency,
config.components,
"output",
options.with_null_state
),
endpoint: _endpoint,
fn_index,
event_data,
trigger_id
});
if (complete) {
fire_event({
type: "status",
time: new Date(),
...complete,
stage: status?.stage!,
queue: true,
endpoint: _endpoint,
fn_index
});
websocket.close();
}
}
};
// different ws contract for gradio versions older than 3.6.0
//@ts-ignore
if (semiver(config.version || "2.0.0", "3.6") < 0) {
					websocket.addEventListener("open", () =>
						websocket.send(JSON.stringify({ hash: session_hash }))
					);
}
} else if (protocol == "sse") {
fire_event({
type: "status",
stage: "pending",
queue: true,
endpoint: _endpoint,
fn_index,
time: new Date()
});
var params = new URLSearchParams({
fn_index: fn_index.toString(),
session_hash: session_hash
}).toString();
let url = new URL(
`${config.root}${api_prefix}/${SSE_URL}?${
url_params ? url_params + "&" : ""
}${params}`
);
if (this.jwt) {
url.searchParams.set("__sign", this.jwt);
}
stream = this.stream(url);
if (!stream) {
return Promise.reject(
new Error("Cannot connect to SSE endpoint: " + url.toString())
);
}
stream.onmessage = async function (event: MessageEvent) {
const _data = JSON.parse(event.data);
const { type, status, data } = handle_message(
_data,
last_status[fn_index]
);
if (type === "update" && status && !complete) {
// call 'status' listeners
fire_event({
type: "status",
endpoint: _endpoint,
fn_index,
time: new Date(),
...status
});
if (status.stage === "error") {
stream?.close();
close();
}
} else if (type === "data") {
let [_, status] = await post_data(
`${config.root}${api_prefix}/queue/data`,
{
...payload,
session_hash,
event_id
}
);
if (status !== 200) {
fire_event({
type: "status",
stage: "error",
message: BROKEN_CONNECTION_MSG,
queue: true,
endpoint: _endpoint,
fn_index,
time: new Date()
});
stream?.close();
close();
}
} else if (type === "complete") {
complete = status;
} else if (type === "log") {
fire_event({
type: "log",
title: data.title,
log: data.log,
level: data.level,
endpoint: _endpoint,
duration: data.duration,
visible: data.visible,
fn_index
});
} else if (type === "generating" || type === "streaming") {
fire_event({
type: "status",
time: new Date(),
...status,
stage: status?.stage!,
queue: true,
endpoint: _endpoint,
fn_index
});
}
if (data) {
fire_event({
type: "data",
time: new Date(),
data: handle_payload(
data.data,
dependency,
config.components,
"output",
options.with_null_state
),
endpoint: _endpoint,
fn_index,
event_data,
trigger_id
});
if (complete) {
fire_event({
type: "status",
time: new Date(),
...complete,
stage: status?.stage!,
queue: true,
endpoint: _endpoint,
fn_index
});
stream?.close();
close();
}
}
};
} else if (
protocol == "sse_v1" ||
protocol == "sse_v2" ||
protocol == "sse_v2.1" ||
protocol == "sse_v3"
) {
// latest API format. v2 introduces sending diffs for intermediate outputs in generative functions, which makes payloads lighter.
// v3 only closes the stream when the backend sends the close stream message.
fire_event({
type: "status",
stage: "pending",
queue: true,
endpoint: _endpoint,
fn_index,
time: new Date()
});
let hostname = "";
if (
typeof window !== "undefined" &&
typeof document !== "undefined"
) {
hostname = window?.location?.hostname;
}
let hfhubdev = "dev.spaces.huggingface.tech";
const origin = hostname.includes(".dev.")
? `https://moon-${hostname.split(".")[1]}.${hfhubdev}`
: `https://huggingface.co`;
const is_zerogpu_iframe =
typeof window !== "undefined" &&
typeof document !== "undefined" &&
window.parent != window &&
window.supports_zerogpu_headers;
const zerogpu_auth_promise = is_zerogpu_iframe
? post_message<Map<string, string>>("zerogpu-headers", origin)
: Promise.resolve(null);
const post_data_promise = zerogpu_auth_promise.then((headers) => {
return post_data(
`${config.root}${api_prefix}/${SSE_DATA_URL}?${url_params}`,
{
...payload,
session_hash
},
headers
);
});
post_data_promise.then(async ([response, status]: any) => {
if (status === 503) {
fire_event({
type: "status",
stage: "error",
message: QUEUE_FULL_MSG,
queue: true,
endpoint: _endpoint,
fn_index,
time: new Date()
});
} else if (status !== 200) {
fire_event({
type: "status",
stage: "error",
message: BROKEN_CONNECTION_MSG,
queue: true,
endpoint: _endpoint,
fn_index,
time: new Date()
});
} else {
event_id = response.event_id as string;
event_id_final = event_id;
let callback = async function (_data: object): Promise<void> {
try {
const { type, status, data, original_msg } = handle_message(
_data,
last_status[fn_index]
);
if (type == "heartbeat") {
return;
}
if (type === "update" && status && !complete) {
// call 'status' listeners
fire_event({
type: "status",
endpoint: _endpoint,
fn_index,
time: new Date(),
original_msg: original_msg,
...status
});
} else if (type === "complete") {
complete = status;
} else if (type == "unexpected_error") {
console.error("Unexpected error", status?.message);
fire_event({
type: "status",
stage: "error",
message:
status?.message || "An Unexpected Error Occurred!",
queue: true,
endpoint: _endpoint,
fn_index,
time: new Date()
});
} else if (type === "log") {
fire_event({
type: "log",
title: data.title,
log: data.log,
level: data.level,
endpoint: _endpoint,
duration: data.duration,
visible: data.visible,
fn_index
});
return;
} else if (type === "generating" || type === "streaming") {
fire_event({
type: "status",
time: new Date(),
...status,
stage: status?.stage!,
queue: true,
endpoint: _endpoint,
fn_index
});
if (
data &&
dependency.connection !== "stream" &&
["sse_v2", "sse_v2.1", "sse_v3"].includes(protocol)
) {
apply_diff_stream(pending_diff_streams, event_id!, data);
}
}
if (data) {
fire_event({
type: "data",
time: new Date(),
data: handle_payload(
data.data,
dependency,
config.components,
"output",
options.with_null_state
),
endpoint: _endpoint,
fn_index
});
if (data.render_config) {
await handle_render_config(data.render_config);
}
if (complete) {
fire_event({
type: "status",
time: new Date(),
...complete,
stage: status?.stage!,
queue: true,
endpoint: _endpoint,
fn_index
});
close();
}
}
if (
status?.stage === "complete" ||
status?.stage === "error"
) {
if (event_callbacks[event_id!]) {
delete event_callbacks[event_id!];
}
if (event_id! in pending_diff_streams) {
delete pending_diff_streams[event_id!];
}
}
} catch (e) {
console.error("Unexpected client exception", e);
fire_event({
type: "status",
stage: "error",
message: "An Unexpected Error Occurred!",
queue: true,
endpoint: _endpoint,
fn_index,
time: new Date()
});
if (["sse_v2", "sse_v2.1", "sse_v3"].includes(protocol)) {
close_stream(stream_status, that.abort_controller);
stream_status.open = false;
close();
}
}
};
if (event_id in pending_stream_messages) {
pending_stream_messages[event_id].forEach((msg) =>
callback(msg)
);
delete pending_stream_messages[event_id];
}
// @ts-ignore
event_callbacks[event_id] = callback;
unclosed_events.add(event_id);
if (!stream_status.open) {
await this.open_stream();
}
}
});
}
}
);
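		// Minimal async-iterator plumbing: fired events are pushed into `values` (or
		// handed straight to a waiting resolver), and next() drains them in order
		// until close() marks the iterator as done.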
let done = false;
const values: (IteratorResult<GradioEvent> | PromiseLike<never>)[] = [];
const resolvers: ((
value: IteratorResult<GradioEvent> | PromiseLike<never>
) => void)[] = [];
function close(): void {
done = true;
while (resolvers.length > 0)
(resolvers.shift() as (typeof resolvers)[0])({
value: undefined,
done: true
});
}
function push(
data: { value: GradioEvent; done: boolean } | PromiseLike<never>
): void {
if (done) return;
if (resolvers.length > 0) {
(resolvers.shift() as (typeof resolvers)[0])(data);
} else {
values.push(data);
}
}
function push_error(error: unknown): void {
push(thenable_reject(error));
close();
}
function push_event(event: GradioEvent): void {
push({ value: event, done: false });
}
function next(): Promise<IteratorResult<GradioEvent, unknown>> {
if (values.length > 0)
return Promise.resolve(values.shift() as (typeof values)[0]);
if (done) return Promise.resolve({ value: undefined, done: true });
return new Promise((resolve) => resolvers.push(resolve));
}
const iterator = {
[Symbol.asyncIterator]: () => iterator,
next,
throw: async (value: unknown) => {
push_error(value);
return next();
},
return: async () => {
close();
return next();
},
cancel,
event_id: event_id_cb
};
return iterator;
} catch (error) {
console.error("Submit function encountered an error:", error);
throw error;
}
}
function thenable_reject<T>(error: T): PromiseLike<never> {
return {
then: (
resolve: (value: never) => PromiseLike<never>,
reject: (error: T) => PromiseLike<never>
) => reject(error)
};
}
function get_endpoint_info(
api_info: ApiInfo<JsApiData>,
endpoint: string | number,
api_map: Record<string, number>,
config: Config
): {
fn_index: number;
endpoint_info: EndpointInfo<JsApiData>;
dependency: Dependency;
} {
let fn_index: number;
let endpoint_info: EndpointInfo<JsApiData>;
let dependency: Dependency;
if (typeof endpoint === "number") {
fn_index = endpoint;
endpoint_info = api_info.unnamed_endpoints[fn_index];
dependency = config.dependencies.find((dep) => dep.id == endpoint)!;
} else {
const trimmed_endpoint = endpoint.replace(/^\//, "");
fn_index = api_map[trimmed_endpoint];
endpoint_info = api_info.named_endpoints[endpoint.trim()];
dependency = config.dependencies.find(
(dep) => dep.id == api_map[trimmed_endpoint]
)!;
}
if (typeof fn_index !== "number") {
throw new Error(
"There is no endpoint matching that name of fn_index matching that number."
);
}
return { fn_index, endpoint_info, dependency };
}

View File

@ -0,0 +1,52 @@
import type { Client } from "..";
import { BROKEN_CONNECTION_MSG, UPLOAD_URL } from "../constants";
import type { UploadResponse } from "../types";
export async function upload_files(
this: Client,
root_url: string,
files: (Blob | File)[],
upload_id?: string
): Promise<UploadResponse> {
const headers: {
Authorization?: string;
} = {};
if (this?.options?.hf_token) {
headers.Authorization = `Bearer ${this.options.hf_token}`;
}
const chunkSize = 1000;
const uploadResponses = [];
let response: Response;
for (let i = 0; i < files.length; i += chunkSize) {
const chunk = files.slice(i, i + chunkSize);
const formData = new FormData();
chunk.forEach((file) => {
formData.append("files", file);
});
try {
const upload_url = upload_id
? `${root_url}${this.api_prefix}/${UPLOAD_URL}?upload_id=${upload_id}`
: `${root_url}${this.api_prefix}/${UPLOAD_URL}`;
response = await this.fetch(upload_url, {
method: "POST",
body: formData,
headers,
credentials: "include"
});
} catch (e) {
throw new Error(BROKEN_CONNECTION_MSG + (e as Error).message);
}
if (!response.ok) {
const error_text = await response.text();
return { error: `HTTP ${response.status}: ${error_text}` };
}
const output: UploadResponse["files"] = await response.json();
if (output) {
uploadResponses.push(...output);
}
}
return { files: uploadResponses };
}

View File

@ -0,0 +1,71 @@
import type { ApiInfo, ApiData } from "../types";
import semiver from "semiver";
import { API_INFO_URL, BROKEN_CONNECTION_MSG } from "../constants";
import { Client } from "../client";
import { SPACE_FETCHER_URL } from "../constants";
import { join_urls, transform_api_info } from "../helpers/api_info";
export async function view_api(this: Client): Promise<any> {
if (this.api_info) return this.api_info;
const { hf_token } = this.options;
const { config } = this;
const headers: {
Authorization?: string;
"Content-Type": "application/json";
} = { "Content-Type": "application/json" };
if (hf_token) {
headers.Authorization = `Bearer ${hf_token}`;
}
if (!config) {
return;
}
try {
let response: Response;
let api_info: ApiInfo<ApiData> | { api: ApiInfo<ApiData> };
if (typeof window !== "undefined" && window.gradio_api_info) {
api_info = window.gradio_api_info;
} else {
if (semiver(config?.version || "2.0.0", "3.30") < 0) {
response = await this.fetch(SPACE_FETCHER_URL, {
method: "POST",
body: JSON.stringify({
serialize: false,
config: JSON.stringify(config)
}),
headers,
credentials: "include"
});
} else {
const url = join_urls(config.root, this.api_prefix, API_INFO_URL);
response = await this.fetch(url, {
headers,
credentials: "include"
});
}
if (!response.ok) {
throw new Error(BROKEN_CONNECTION_MSG);
}
api_info = await response.json();
}
if ("api" in api_info) {
api_info = api_info.api;
}
if (
api_info.named_endpoints["/predict"] &&
!api_info.unnamed_endpoints["0"]
) {
api_info.unnamed_endpoints[0] = api_info.named_endpoints["/predict"];
}
return transform_api_info(api_info, config, this.api_map);
} catch (e) {
throw new Error("Could not get API info. " + (e as Error).message);
}
}

View File

@ -0,0 +1 @@
/// <reference types="vite/client" />

View File

@ -0,0 +1,27 @@
{
"include": ["src/**/*"],
"exclude": ["src/**/*.test.ts", "src/**/*.node-test.ts"],
"compilerOptions": {
"allowJs": true,
"declaration": true,
"emitDeclarationOnly": true,
"outDir": "dist",
"declarationMap": true,
"module": "ESNext",
"target": "ES2020",
"useDefineForClassFields": true,
"lib": ["ES2020", "DOM", "DOM.Iterable"],
"skipLibCheck": true,
/* Bundler */
"moduleResolution": "Bundler",
"skipDefaultLibCheck": true,
"allowImportingTsExtensions": true,
"esModuleInterop": true,
"resolveJsonModule": true,
"isolatedModules": true,
/* Linting */
"strict": true
}
}

View File

@ -0,0 +1,45 @@
import { defineConfig } from "vite";
import { svelte } from "@sveltejs/vite-plugin-svelte";
const TEST_MODE = process.env.TEST_MODE || "happy-dom";
export default defineConfig(({ mode }) => {
if (mode === "preview") {
return {
entry: "index.html"
};
}
return {
build: {
lib: {
entry: "src/index.ts",
formats: ["es"],
fileName: (format) => `index.${format}.js`
},
rollupOptions: {
input: "src/index.ts",
output: {
dir: "dist"
}
}
},
plugins: [svelte()],
mode: process.env.MODE || "development",
test: {
include: ["./src/test/*.test.*"],
environment: TEST_MODE
},
ssr: {
target: "node",
format: "esm",
noExternal: [
"ws",
"semiver",
"bufferutil",
"@gradio/upload",
"fetch-event-stream"
]
}
};
});

File diff suppressed because it is too large

View File

@ -0,0 +1,143 @@
# `gradio_client`: Use a Gradio app as an API -- in 3 lines of Python
This directory contains the source code for `gradio_client`, a lightweight Python library that makes it very easy to use any Gradio app as an API.
As an example, consider this [Hugging Face Space that transcribes audio files](https://huggingface.co/spaces/abidlabs/whisper) that are recorded from the microphone.
![](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/gradio-guides/whisper-screenshot.jpg)
Using the `gradio_client` library, we can easily use the Gradio app as an API to transcribe audio files programmatically.
Here's the entire code to do it:
```python
from gradio_client import Client
client = Client("abidlabs/whisper")
client.predict("audio_sample.wav")
>> "This is a test of the whisper speech recognition model."
```
The Gradio client works with any Gradio Space, whether it be an image generator, a stateful chatbot, or a tax calculator.
## Installation
If you already have a recent version of `gradio`, then the `gradio_client` is included as a dependency.
Otherwise, the lightweight `gradio_client` package can be installed from pip (or pip3) and works with Python versions 3.10 or higher:
```bash
$ pip install gradio_client
```
## Basic Usage
### Connecting to a Space or a Gradio app
Start by instantiating a `Client` object and connecting it to a Gradio app that is running on Spaces (or anywhere else)!
**Connecting to a Space**
```python
from gradio_client import Client
client = Client("abidlabs/en2fr") # a Space that translates from English to French
```
You can also connect to private Spaces by passing in your HF token with the `hf_token` parameter. You can get your HF token here: https://huggingface.co/settings/tokens
```python
from gradio_client import Client
client = Client("abidlabs/my-private-space", hf_token="...")
```
**Duplicating a Space for private use**
While you can use any public Space as an API, you may get rate limited by Hugging Face if you make too many requests. For unlimited usage of a Space, simply duplicate the Space to create a private Space,
and then use it to make as many requests as you'd like!
The `gradio_client` includes a class method: `Client.duplicate()` to make this process simple:
```python
from gradio_client import Client
client = Client.duplicate("abidlabs/whisper")
client.predict("audio_sample.wav")
>> "This is a test of the whisper speech recognition model."
```
If you have previously duplicated a Space, re-running `duplicate()` will _not_ create a new Space. Instead, the Client will attach to the previously-created Space. So it is safe to re-run the `Client.duplicate()` method multiple times.
**Note:** if the original Space uses GPUs, your private Space will as well, and your Hugging Face account will get billed based on the price of the GPU. To minimize charges, your Space will automatically go to sleep after 1 hour of inactivity. You can also set the hardware using the `hardware` parameter of `duplicate()`.
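For example, here is a minimal sketch of requesting specific hardware when duplicating a Space (the `"t4-small"` tier name is only illustrative and is not taken from this guide):
```python
from gradio_client import Client
# "t4-small" is an example Hugging Face hardware tier, used here purely for illustration
client = Client.duplicate("abidlabs/whisper", hardware="t4-small")
client.predict("audio_sample.wav")
```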
**Connecting a general Gradio app**
If your app is running somewhere else, just provide the full URL instead, including the "http://" or "https://". Here's an example of making predictions to a Gradio app that is running on a share URL:
```python
from gradio_client import Client
client = Client("https://bec81a83-5b5c-471e.gradio.live")
```
### Inspecting the API endpoints
Once you have connected to a Gradio app, you can view the APIs that are available to you by calling the `.view_api()` method. For the Whisper Space, we see the following:
```
Client.predict() Usage Info
---------------------------
Named API endpoints: 1
- predict(input_audio, api_name="/predict") -> value_0
Parameters:
- [Audio] input_audio: str (filepath or URL)
Returns:
- [Textbox] value_0: str (value)
```
This shows us that we have 1 API endpoint in this space, and shows us how to use the API endpoint to make a prediction: we should call the `.predict()` method, providing a parameter `input_audio` of type `str`, which is a `filepath or URL`.
We should also provide the `api_name='/predict'` argument. Although this isn't necessary if a Gradio app has a single named endpoint, it does allow us to call different endpoints in a single app if they are available. If an app has unnamed API endpoints, these can also be displayed by running `.view_api(all_endpoints=True)`.
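Putting this together, here is a minimal sketch that inspects all endpoints of the Whisper Space and then calls the named endpoint explicitly, reusing the sample audio file from above:
```python
from gradio_client import Client
client = Client("abidlabs/whisper")
client.view_api(all_endpoints=True)  # print usage info for every endpoint, including unnamed ones
client.predict("audio_sample.wav", api_name="/predict")  # call the endpoint by its api_name
```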
### Making a prediction
The simplest way to make a prediction is to call the `.predict()` function with the appropriate arguments:
```python
from gradio_client import Client
client = Client("abidlabs/en2fr")
client.predict("Hello")
>> Bonjour
```
If there are multiple parameters, then you should pass them as separate arguments to `.predict()`, like this:
```python
from gradio_client import Client
client = Client("gradio/calculator")
client.predict(4, "add", 5)
>> 9.0
```
For certain inputs, such as images, you should pass in the filepath or URL to the file. Likewise, for the corresponding output types, you will get a filepath or URL returned.
```python
from gradio_client import Client
client = Client("abidlabs/whisper")
client.predict("https://audio-samples.github.io/samples/mp3/blizzard_unconditional/sample-0.mp3")
>> "My thought I have nobody by a beauty and will as you poured. Mr. Rochester is serve in that so don't find simpus, and devoted abode, to at might in a r—"
```
## Advanced Usage
For more ways to use the Gradio Python Client, check out our dedicated Guide on the Python client, available here: https://www.gradio.app/guides/getting-started-with-the-python-client

View File

@ -0,0 +1,9 @@
#!/bin/bash
set -e
cd "$(dirname ${0})"
python3 -m pip install build
rm -rf dist/*
rm -rf build/*
python3 -m build

File diff suppressed because it is too large

View File

@ -0,0 +1,11 @@
from gradio_client.client import Client
from gradio_client.data_classes import FileData
from gradio_client.utils import __version__, file, handle_file
__all__ = [
"Client",
"file",
"handle_file",
"FileData",
"__version__",
]

View File

@ -0,0 +1,3 @@
from gradio_client.cli import deploy_discord
__all__ = ["deploy_discord"]

View File

@ -0,0 +1,47 @@
from typing import Annotated, Optional
from typer import Option
from gradio_client import Client
def main(
src: Annotated[
Optional[str],
Option(
help="The space id or url or gradio app you want to deploy as a gradio bot."
),
] = None,
discord_bot_token: Annotated[
str, Option(help="Discord bot token. Get one on the discord website.")
] = None,
api_names: Annotated[
list[str], Option(help="Api names to turn into discord bots")
] = None,
to_id: Annotated[
Optional[str], Option(help="Name of the space used to host the discord bot")
] = None,
hf_token: Annotated[
Optional[str],
Option(
help=(
"Hugging Face token. Can be ommitted if you are logged in via huggingface_hub cli. "
"Must be provided if upstream space is private."
)
),
] = None,
private: Annotated[
bool, Option(help="Whether the discord bot space is private.")
] = False,
):
for i, name in enumerate(api_names):
if "," in name:
api_names[i] = tuple(name.split(","))
Client(src).deploy_discord(
discord_bot_token=discord_bot_token,
api_names=api_names,
to_id=to_id,
hf_token=hf_token,
private=private,
)

File diff suppressed because it is too large

View File

@ -0,0 +1,341 @@
"""This module contains the EndpointV3Compatibility class, which is used to connect to Gradio apps running 3.x.x versions of Gradio."""
from __future__ import annotations
import json
from pathlib import Path
from typing import TYPE_CHECKING, Any, Literal
import httpx
import huggingface_hub
import websockets
from packaging import version
from gradio_client import serializing, utils
from gradio_client.exceptions import SerializationSetupError
from gradio_client.utils import (
Communicator,
)
if TYPE_CHECKING:
from gradio_client import Client
class EndpointV3Compatibility:
"""Endpoint class for connecting to v3 endpoints. Backwards compatibility."""
def __init__(self, client: Client, fn_index: int, dependency: dict, *_args):
self.client: Client = client
self.fn_index = fn_index
self.dependency = dependency
api_name = dependency.get("api_name")
self.api_name: str | Literal[False] | None = (
"/" + api_name if isinstance(api_name, str) else api_name
)
self.use_ws = self._use_websocket(self.dependency)
self.protocol = "ws" if self.use_ws else "http"
self.input_component_types = []
self.output_component_types = []
self.root_url = client.src + "/" if not client.src.endswith("/") else client.src
try:
# Only a real API endpoint if backend_fn is True (so not just a frontend function), serializers are valid,
# and api_name is not False (meaning that the developer has explicitly disabled the API endpoint)
self.serializers, self.deserializers = self._setup_serializers()
self.is_valid = self.dependency["backend_fn"] and self.api_name is not False
except SerializationSetupError:
self.is_valid = False
self.backend_fn = dependency.get("backend_fn")
self.show_api = True
def __repr__(self):
return f"Endpoint src: {self.client.src}, api_name: {self.api_name}, fn_index: {self.fn_index}"
def __str__(self):
return self.__repr__()
def make_end_to_end_fn(self, helper: Communicator | None = None):
_predict = self.make_predict(helper)
def _inner(*data):
if not self.is_valid:
raise utils.InvalidAPIEndpointError()
data = self.insert_state(*data)
data = self.serialize(*data)
predictions = _predict(*data)
predictions = self.process_predictions(*predictions)
# Append final output only if not already present
# for consistency between generators and not generators
if helper:
with helper.lock:
if not helper.job.outputs:
helper.job.outputs.append(predictions)
return predictions
return _inner
def make_cancel(self, helper: Communicator | None = None): # noqa: ARG002 (needed so that both endpoints classes have the same api)
return None
def make_predict(self, helper: Communicator | None = None):
def _predict(*data) -> tuple:
data = json.dumps(
{
"data": data,
"fn_index": self.fn_index,
"session_hash": self.client.session_hash,
}
)
hash_data = json.dumps(
{
"fn_index": self.fn_index,
"session_hash": self.client.session_hash,
}
)
if self.use_ws:
result = utils.synchronize_async(self._ws_fn, data, hash_data, helper)
if "error" in result:
raise ValueError(result["error"])
else:
response = httpx.post(
self.client.api_url,
headers=self.client.headers,
json=data,
verify=self.client.ssl_verify,
**self.client.httpx_kwargs,
)
result = json.loads(response.content.decode("utf-8"))
try:
output = result["data"]
except KeyError as ke:
is_public_space = (
self.client.space_id
and not huggingface_hub.space_info(self.client.space_id).private
)
if "error" in result and "429" in result["error"] and is_public_space:
raise utils.TooManyRequestsError(
f"Too many requests to the API, please try again later. To avoid being rate-limited, "
f"please duplicate the Space using Client.duplicate({self.client.space_id}) "
f"and pass in your Hugging Face token."
) from None
elif "error" in result:
raise ValueError(result["error"]) from None
raise KeyError(
f"Could not find 'data' key in response. Response received: {result}"
) from ke
return tuple(output)
return _predict
def _predict_resolve(self, *data) -> Any:
"""Needed for gradio.load(), which has a slightly different signature for serializing/deserializing"""
outputs = self.make_predict()(*data)
if len(self.dependency["outputs"]) == 1:
return outputs[0]
return outputs
def _upload(
self, file_paths: list[str | list[str]]
) -> list[str | list[str]] | list[dict[str, Any] | list[dict[str, Any]]]:
if not file_paths:
return []
# Put all the filepaths in one file
# but then keep track of which index in the
# original list they came from so we can recreate
# the original structure
files = []
indices = []
for i, fs in enumerate(file_paths):
if not isinstance(fs, list):
fs = [fs]
for f in fs:
files.append(("files", (Path(f).name, open(f, "rb")))) # noqa: SIM115
indices.append(i)
r = httpx.post(
self.client.upload_url,
headers=self.client.headers,
files=files,
verify=self.client.ssl_verify,
**self.client.httpx_kwargs,
)
if r.status_code != 200:
uploaded = file_paths
else:
uploaded = []
result = r.json()
for i, fs in enumerate(file_paths):
if isinstance(fs, list):
output = [o for ix, o in enumerate(result) if indices[ix] == i]
res = [
{
"is_file": True,
"name": o,
"orig_name": Path(f).name,
"data": None,
}
for f, o in zip(fs, output, strict=False)
]
else:
o = next(o for ix, o in enumerate(result) if indices[ix] == i)
res = {
"is_file": True,
"name": o,
"orig_name": Path(fs).name,
"data": None,
}
uploaded.append(res)
return uploaded
def _add_uploaded_files_to_data(
self,
files: list[str | list[str]] | list[dict[str, Any] | list[dict[str, Any]]],
data: list[Any],
) -> None:
"""Helper function to modify the input data with the uploaded files."""
file_counter = 0
for i, t in enumerate(self.input_component_types):
if t in ["file", "uploadbutton"]:
data[i] = files[file_counter]
file_counter += 1
def insert_state(self, *data) -> tuple:
data = list(data)
for i, input_component_type in enumerate(self.input_component_types):
if input_component_type == utils.STATE_COMPONENT:
data.insert(i, None)
return tuple(data)
def remove_skipped_components(self, *data) -> tuple:
data = [
d
for d, oct in zip(data, self.output_component_types, strict=False)
if oct not in utils.SKIP_COMPONENTS
]
return tuple(data)
def reduce_singleton_output(self, *data) -> Any:
if (
len(
[
oct
for oct in self.output_component_types
if oct not in utils.SKIP_COMPONENTS
]
)
== 1
):
return data[0]
else:
return data
def serialize(self, *data) -> tuple:
if len(data) != len(self.serializers):
raise ValueError(
f"Expected {len(self.serializers)} arguments, got {len(data)}"
)
files = [
f
for f, t in zip(data, self.input_component_types, strict=False)
if t in ["file", "uploadbutton"]
]
uploaded_files = self._upload(files)
data = list(data)
self._add_uploaded_files_to_data(uploaded_files, data)
o = tuple(
[s.serialize(d) for s, d in zip(self.serializers, data, strict=False)]
)
return o
def deserialize(self, *data) -> tuple:
if len(data) != len(self.deserializers):
raise ValueError(
f"Expected {len(self.deserializers)} outputs, got {len(data)}"
)
outputs = tuple(
[
s.deserialize(
d,
save_dir=self.client.output_dir,
hf_token=self.client.hf_token,
root_url=self.root_url,
)
for s, d in zip(self.deserializers, data, strict=False)
]
)
return outputs
def process_predictions(self, *predictions):
if self.client.download_files:
predictions = self.deserialize(*predictions)
predictions = self.remove_skipped_components(*predictions)
predictions = self.reduce_singleton_output(*predictions)
return predictions
def _setup_serializers(
self,
) -> tuple[list[serializing.Serializable], list[serializing.Serializable]]:
inputs = self.dependency["inputs"]
serializers = []
for i in inputs:
for component in self.client.config["components"]:
if component["id"] == i:
component_name = component["type"]
self.input_component_types.append(component_name)
if component.get("serializer"):
serializer_name = component["serializer"]
if serializer_name not in serializing.SERIALIZER_MAPPING:
raise SerializationSetupError(
f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version."
)
serializer = serializing.SERIALIZER_MAPPING[serializer_name]
elif component_name in serializing.COMPONENT_MAPPING:
serializer = serializing.COMPONENT_MAPPING[component_name]
else:
raise SerializationSetupError(
f"Unknown component: {component_name}, you may need to update your gradio_client version."
)
serializers.append(serializer()) # type: ignore
outputs = self.dependency["outputs"]
deserializers = []
for i in outputs:
for component in self.client.config["components"]:
if component["id"] == i:
component_name = component["type"]
self.output_component_types.append(component_name)
if component.get("serializer"):
serializer_name = component["serializer"]
if serializer_name not in serializing.SERIALIZER_MAPPING:
raise SerializationSetupError(
f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version."
)
deserializer = serializing.SERIALIZER_MAPPING[serializer_name]
elif component_name in utils.SKIP_COMPONENTS:
deserializer = serializing.SimpleSerializable
elif component_name in serializing.COMPONENT_MAPPING:
deserializer = serializing.COMPONENT_MAPPING[component_name]
else:
raise SerializationSetupError(
f"Unknown component: {component_name}, you may need to update your gradio_client version."
)
deserializers.append(deserializer()) # type: ignore
return serializers, deserializers
def _use_websocket(self, dependency: dict) -> bool:
queue_enabled = self.client.config.get("enable_queue", False)
queue_uses_websocket = version.parse(
self.client.config.get("version", "2.0")
) >= version.Version("3.2")
dependency_uses_queue = dependency.get("queue", False) is not False
return queue_enabled and queue_uses_websocket and dependency_uses_queue
async def _ws_fn(self, data, hash_data, helper: Communicator):
async with websockets.connect( # type: ignore
self.client.ws_url,
open_timeout=10,
extra_headers=self.client.headers,
max_size=1024 * 1024 * 1024,
) as websocket:
return await utils.get_pred_from_ws(websocket, data, hash_data, helper)

View File

@ -0,0 +1,28 @@
from __future__ import annotations
from typing import Any, TypedDict
from typing_extensions import NotRequired
class FileData(TypedDict):
name: str | None # filename
data: str | None # base64 encoded data
size: NotRequired[int | None] # size in bytes
is_file: NotRequired[
bool
] # whether the data corresponds to a file or base64 encoded data
orig_name: NotRequired[str] # original filename
mime_type: NotRequired[str]
is_stream: NotRequired[bool]
class ParameterInfo(TypedDict):
label: str
parameter_name: str
parameter_has_default: NotRequired[bool]
parameter_default: NotRequired[Any]
type: dict
python_type: dict
component: str
example_input: Any

View File

@ -0,0 +1,354 @@
"""Contains methods that generate documentation for Gradio functions and classes."""
from __future__ import annotations
import dataclasses
import inspect
import warnings
from collections import defaultdict
from collections.abc import Callable
from functools import lru_cache
classes_to_document = defaultdict(list)
classes_inherit_documentation = {}
def set_documentation_group(m): # noqa: ARG001
"""A no-op for backwards compatibility of custom components published prior to 4.16.0"""
pass
def extract_instance_attr_doc(cls, attr):
code = inspect.getsource(cls.__init__)
lines = [line.strip() for line in code.split("\n")]
i = None
for i, line in enumerate(lines): # noqa: B007
if line.startswith("self." + attr + ":") or line.startswith(
"self." + attr + " ="
):
break
if i is None:
raise NameError(f"Could not find {attr} in {cls.__name__}")
start_line = lines.index('"""', i)
end_line = lines.index('"""', start_line + 1)
for j in range(i + 1, start_line):
if lines[j].startswith("self."):
raise ValueError(
f"Found another attribute before docstring for {attr} in {cls.__name__}: "
+ lines[j]
+ "\n start:"
+ lines[i]
)
doc_string = " ".join(lines[start_line + 1 : end_line])
return doc_string
_module_prefixes = [
("gradio._simple_templates", "component"),
("gradio.block", "block"),
("gradio.chat", "chatinterface"),
("gradio.component", "component"),
("gradio.events", "helpers"),
("gradio.data_classes", "helpers"),
("gradio.exceptions", "helpers"),
("gradio.external", "helpers"),
("gradio.flag", "flagging"),
("gradio.helpers", "helpers"),
("gradio.interface", "interface"),
("gradio.layout", "layout"),
("gradio.route", "routes"),
("gradio.theme", "themes"),
("gradio_client.", "py-client"),
("gradio.utils", "helpers"),
("gradio.renderable", "renderable"),
]
@lru_cache(maxsize=10)
def _get_module_documentation_group(modname) -> str:
for prefix, group in _module_prefixes:
if modname.startswith(prefix):
return group
raise ValueError(f"No known documentation group for module {modname!r}")
def document(*fns, inherit=False, documentation_group=None):
"""
Defines the @document decorator which adds classes or functions to the Gradio
documentation at www.gradio.app/docs.
Usage examples:
- Put @document() above a class to document the class and its constructor.
- Put @document("fn1", "fn2") above a class to also document methods fn1 and fn2.
- Put @document("*fn3") with an asterisk above a class to document the instance attribute methods f3.
"""
_documentation_group = documentation_group
def inner_doc(cls):
functions = list(fns)
if hasattr(cls, "EVENTS"):
functions += cls.EVENTS
if inherit:
classes_inherit_documentation[cls] = None
documentation_group = _documentation_group # avoid `nonlocal` reassignment
if _documentation_group is None:
try:
modname = inspect.getmodule(cls).__name__ # type: ignore
if modname.startswith("gradio.") or modname.startswith(
"gradio_client."
):
documentation_group = _get_module_documentation_group(modname)
else:
# Then this is likely a custom Gradio component that we do not include in the documentation
pass
except Exception as exc:
warnings.warn(f"Could not get documentation group for {cls}: {exc}")
classes_to_document[documentation_group].append((cls, functions))
return cls
return inner_doc
def document_fn(fn: Callable, cls) -> tuple[str, list[dict], dict, str | None]:
"""
Generates documentation for any function.
Parameters:
fn: Function to document
Returns:
description: General description of fn
parameters: A list of dicts for each parameter, storing data for the parameter name, annotation and doc
return: A dict storing data for the returned annotation and doc
example: Code for an example use of the fn
"""
doc_str = inspect.getdoc(fn) or ""
doc_lines = doc_str.split("\n")
signature = inspect.signature(fn)
description, parameters, returns, examples = [], {}, [], []
mode = "description"
for line in doc_lines:
line = line.rstrip()
if line == "Parameters:":
mode = "parameter"
elif line.startswith("Example:"):
mode = "example"
if "(" in line and ")" in line:
c = line.split("(")[1].split(")")[0]
if c != cls.__name__:
mode = "ignore"
elif line == "Returns:":
mode = "return"
else:
if mode == "description":
description.append(line if line.strip() else "<br>")
continue
if not (line.startswith(" ") or line.strip() == ""):
print(line)
if not (line.startswith(" ") or line.strip() == ""):
raise SyntaxError(
f"Documentation format for {fn.__name__} has format error in line: {line}"
)
line = line[4:]
if mode == "parameter":
colon_index = line.index(": ")
if colon_index < -1:
raise SyntaxError(
f"Documentation format for {fn.__name__} has format error in line: {line}"
)
parameter = line[:colon_index]
parameter_doc = line[colon_index + 2 :]
parameters[parameter] = parameter_doc
elif mode == "return":
returns.append(line)
elif mode == "example":
examples.append(line)
description_doc = " ".join(description)
parameter_docs = []
for param_name, param in signature.parameters.items():
if param_name.startswith("_"):
continue
if param_name == "self":
continue
if param_name in ["kwargs", "args"] and param_name not in parameters:
continue
parameter_doc = {
"name": param_name,
"annotation": param.annotation,
"doc": parameters.get(param_name),
}
if param_name in parameters:
del parameters[param_name]
if param.default != inspect.Parameter.empty:
default = param.default
if isinstance(default, str):
default = '"' + default + '"'
if default.__class__.__module__ != "builtins":
default = f"{default.__class__.__name__}()"
parameter_doc["default"] = default
elif parameter_doc["doc"] is not None:
if "kwargs" in parameter_doc["doc"]:
parameter_doc["kwargs"] = True
if "args" in parameter_doc["doc"]:
parameter_doc["args"] = True
parameter_docs.append(parameter_doc)
if parameters:
raise ValueError(
f"Documentation format for {fn.__name__} documents "
f"nonexistent parameters: {', '.join(parameters.keys())}. "
f"Valid parameters: {', '.join(signature.parameters.keys())}"
)
if len(returns) == 0:
return_docs = {}
elif len(returns) == 1:
return_docs = {"annotation": signature.return_annotation, "doc": returns[0]}
else:
return_docs = {}
# raise ValueError("Does not support multiple returns yet.")
examples_doc = "\n".join(examples) if len(examples) > 0 else None
return description_doc, parameter_docs, return_docs, examples_doc
def document_cls(cls):
doc_str = inspect.getdoc(cls)
if doc_str is None:
return "", {}, ""
tags = {}
description_lines = []
mode = "description"
for line in doc_str.split("\n"):
line = line.rstrip()
if line.endswith(":") and " " not in line:
mode = line[:-1].lower()
tags[mode] = []
elif line.split(" ")[0].endswith(":") and not line.startswith(" "):
tag = line[: line.index(":")].lower()
value = line[line.index(":") + 2 :]
tags[tag] = value
elif mode == "description":
description_lines.append(line if line.strip() else "<br>")
else:
if not (line.startswith(" ") or not line.strip()):
raise SyntaxError(
f"Documentation format for {cls.__name__} has format error in line: {line}"
)
tags[mode].append(line[4:])
if "example" in tags:
example = "\n".join(tags["example"])
del tags["example"]
else:
example = None
for key, val in tags.items():
if isinstance(val, list):
tags[key] = "<br>".join(val)
description = " ".join(description_lines).replace("\n", "<br>")
return description, tags, example
def generate_documentation():
documentation = {}
for mode, class_list in classes_to_document.items():
documentation[mode] = []
for cls, fns in class_list:
fn_to_document = (
cls
if inspect.isfunction(cls) or dataclasses.is_dataclass(cls)
else cls.__init__
)
_, parameter_doc, return_doc, _ = document_fn(fn_to_document, cls)
if (
hasattr(cls, "preprocess")
and callable(cls.preprocess) # type: ignore
and hasattr(cls, "postprocess")
and callable(cls.postprocess) # type: ignore
):
preprocess_doc = document_fn(cls.preprocess, cls) # type: ignore
postprocess_doc = document_fn(cls.postprocess, cls) # type: ignore
preprocess_doc, postprocess_doc = (
{
"parameter_doc": preprocess_doc[1],
"return_doc": preprocess_doc[2],
},
{
"parameter_doc": postprocess_doc[1],
"return_doc": postprocess_doc[2],
},
)
cls_description, cls_tags, cls_example = document_cls(cls)
cls_documentation = {
"class": cls,
"name": cls.__name__,
"description": cls_description,
"tags": cls_tags,
"parameters": parameter_doc,
"returns": return_doc,
"example": cls_example,
"fns": [],
}
if (
hasattr(cls, "preprocess")
and callable(cls.preprocess) # type: ignore
and hasattr(cls, "postprocess")
and callable(cls.postprocess) # type: ignore
):
cls_documentation["preprocess"] = preprocess_doc # type: ignore
cls_documentation["postprocess"] = postprocess_doc # type: ignore
for fn_name in fns:
instance_attribute_fn = fn_name.startswith("*")
if instance_attribute_fn:
fn_name = fn_name[1:]
# Instance attribute fns are classes
# whose __call__ method determines their behavior
fn = getattr(cls(), fn_name).__call__
else:
fn = getattr(cls, fn_name)
if not callable(fn):
description_doc = str(fn)
parameter_docs = {}
return_docs = {}
examples_doc = ""
override_signature = f"gr.{cls.__name__}.{fn_name}"
else:
(
description_doc,
parameter_docs,
return_docs,
examples_doc,
) = document_fn(fn, cls)
if fn_name in getattr(cls, "EVENTS", []):
parameter_docs = parameter_docs[1:]
override_signature = None
if instance_attribute_fn:
description_doc = extract_instance_attr_doc(cls, fn_name)
cls_documentation["fns"].append(
{
"fn": fn,
"name": fn_name,
"description": description_doc,
"tags": {},
"parameters": parameter_docs,
"returns": return_docs,
"example": examples_doc,
"override_signature": override_signature,
}
)
documentation[mode].append(cls_documentation)
if cls in classes_inherit_documentation:
classes_inherit_documentation[cls] = cls_documentation["fns"]
for mode, class_list in classes_to_document.items():
for i, (cls, _) in enumerate(class_list):
for super_class, fns in classes_inherit_documentation.items():
if (
inspect.isclass(cls)
and issubclass(cls, super_class)
and cls != super_class
):
for inherited_fn in fns:
inherited_fn = dict(inherited_fn)
try:
inherited_fn["description"] = extract_instance_attr_doc(
cls, inherited_fn["name"]
)
except ValueError:
pass
documentation[mode][i]["fns"].append(inherited_fn)
return documentation

View File

@ -0,0 +1,37 @@
class SerializationSetupError(ValueError):
"""Raised when a serializers cannot be set up correctly."""
pass
class AuthenticationError(ValueError):
"""Raised when the client is unable to authenticate itself to a Gradio app due to invalid or missing credentials."""
pass
class AppError(ValueError):
"""Raised when the upstream Gradio app throws an error because of the value submitted by the client."""
def __init__(
self,
message: str = "Error raised.",
duration: float | None = 10,
visible: bool = True,
title: str = "Error",
print_exception: bool = True,
):
"""
Parameters:
message: The error message to be displayed to the user. Can be HTML, which will be rendered in the modal.
duration: The duration in seconds to display the error message. If None or 0, the error message will be displayed until the user closes it.
visible: Whether the error message should be displayed in the UI.
title: The title to be displayed to the user at the top of the error modal.
print_exception: Whether to print traceback of the error to the console when the error is raised.
"""
self.title = title
self.message = message
self.duration = duration
self.visible = visible
self.print_exception = print_exception
super().__init__(self.message)

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,8 @@
{
"name": "gradio_client",
"version": "1.10.4",
"description": "",
"python": "true",
"main_changeset": true,
"private": true
}

View File

@ -0,0 +1,602 @@
"""Included for backwards compatibility with 3.x spaces/apps."""
from __future__ import annotations
import json
import os
import secrets
import tempfile
import uuid
from pathlib import Path
from typing import Any
from gradio_client import media_data, utils
from gradio_client.data_classes import FileData
with open(Path(__file__).parent / "types.json") as f:
serializer_types = json.load(f)
class Serializable:
def serialized_info(self):
"""
The typing information for this component as a dictionary whose values are a list of 2 strings: [Python type, language-agnostic description].
Keys of the dictionary are: raw_input, raw_output, serialized_input, serialized_output
"""
return self.api_info()
def api_info(self) -> dict[str, list[str]]:
"""
The typing information for this component as a dictionary whose values are a list of 2 strings: [Python type, language-agnostic description].
Keys of the dictionary are: raw_input, raw_output, serialized_input, serialized_output
"""
raise NotImplementedError()
def example_inputs(self) -> dict[str, Any]:
"""
The example inputs for this component as a dictionary whose values are example inputs compatible with this component.
Keys of the dictionary are: raw, serialized
"""
raise NotImplementedError()
# For backwards compatibility
def input_api_info(self) -> tuple[str, str]:
api_info = self.api_info()
types = api_info.get("serialized_input", [api_info["info"]["type"]] * 2) # type: ignore
return (types[0], types[1])
# For backwards compatibility
def output_api_info(self) -> tuple[str, str]:
api_info = self.api_info()
types = api_info.get("serialized_output", [api_info["info"]["type"]] * 2) # type: ignore
return (types[0], types[1])
def serialize(self, x: Any, load_dir: str | Path = "", allow_links: bool = False):
"""
Convert data from human-readable format to serialized format for a browser.
"""
return x
def deserialize(
self,
x: Any,
save_dir: str | Path | None = None,
root_url: str | None = None,
hf_token: str | None = None,
):
"""
Convert data from serialized format for a browser to human-readable format.
"""
return x
class SimpleSerializable(Serializable):
"""General class that does not perform any serialization or deserialization."""
def api_info(self) -> dict[str, bool | dict]:
return {
"info": serializer_types["SimpleSerializable"],
"serialized_info": False,
}
def example_inputs(self) -> dict[str, Any]:
return {
"raw": None,
"serialized": None,
}
class StringSerializable(Serializable):
"""Expects a string as input/output but performs no serialization."""
def api_info(self) -> dict[str, bool | dict]:
return {
"info": serializer_types["StringSerializable"],
"serialized_info": False,
}
def example_inputs(self) -> dict[str, Any]:
return {
"raw": "Howdy!",
"serialized": "Howdy!",
}
class ListStringSerializable(Serializable):
"""Expects a list of strings as input/output but performs no serialization."""
def api_info(self) -> dict[str, bool | dict]:
return {
"info": serializer_types["ListStringSerializable"],
"serialized_info": False,
}
def example_inputs(self) -> dict[str, Any]:
return {
"raw": ["Howdy!", "Merhaba"],
"serialized": ["Howdy!", "Merhaba"],
}
class BooleanSerializable(Serializable):
"""Expects a boolean as input/output but performs no serialization."""
def api_info(self) -> dict[str, bool | dict]:
return {
"info": serializer_types["BooleanSerializable"],
"serialized_info": False,
}
def example_inputs(self) -> dict[str, Any]:
return {
"raw": True,
"serialized": True,
}
class NumberSerializable(Serializable):
"""Expects a number (int/float) as input/output but performs no serialization."""
def api_info(self) -> dict[str, bool | dict]:
return {
"info": serializer_types["NumberSerializable"],
"serialized_info": False,
}
def example_inputs(self) -> dict[str, Any]:
return {
"raw": 5,
"serialized": 5,
}
class ImgSerializable(Serializable):
"""Expects a base64 string as input/output which is serialized to a filepath."""
def serialized_info(self):
return {
"type": "string",
"description": "filepath on your computer (or URL) of image",
}
def api_info(self) -> dict[str, bool | dict]:
return {"info": serializer_types["ImgSerializable"], "serialized_info": True}
def example_inputs(self) -> dict[str, Any]:
return {
"raw": media_data.BASE64_IMAGE,
"serialized": "https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png",
}
def serialize(
self,
x: str | None,
load_dir: str | Path = "",
allow_links: bool = False,
) -> str | None:
"""
Convert from human-friendly version of a file (string filepath) to a serialized
representation (base64).
Parameters:
x: String path to file to serialize
load_dir: Path to directory containing x
"""
if not x:
return None
if utils.is_http_url_like(x):
return utils.encode_url_to_base64(x)
return utils.encode_file_to_base64(Path(load_dir) / x)
def deserialize(
self,
x: str | None,
save_dir: str | Path | None = None,
root_url: str | None = None,
hf_token: str | None = None,
) -> str | None:
"""
Convert from serialized representation of a file (base64) to a human-friendly
version (string filepath). Optionally, save the file to the directory specified by save_dir
Parameters:
x: Base64 representation of image to deserialize into a string filepath
save_dir: Path to directory to save the deserialized image to
root_url: Ignored
hf_token: Ignored
"""
if x is None or x == "":
return None
file = utils.decode_base64_to_file(x, dir=save_dir)
return file.name
class FileSerializable(Serializable):
"""Expects a dict with base64 representation of object as input/output which is serialized to a filepath."""
def __init__(self) -> None:
self.stream = None
self.stream_name = None
super().__init__()
def serialized_info(self):
return self._single_file_serialized_info()
def _single_file_api_info(self):
return {
"info": serializer_types["SingleFileSerializable"],
"serialized_info": True,
}
def _single_file_serialized_info(self):
return {
"type": "string",
"description": "filepath on your computer (or URL) of file",
}
def _multiple_file_serialized_info(self):
return {
"type": "array",
"description": "List of filepath(s) or URL(s) to files",
"items": {
"type": "string",
"description": "filepath on your computer (or URL) of file",
},
}
def _multiple_file_api_info(self):
return {
"info": serializer_types["MultipleFileSerializable"],
"serialized_info": True,
}
def api_info(self) -> dict[str, dict | bool]:
return self._single_file_api_info()
def example_inputs(self) -> dict[str, Any]:
return self._single_file_example_inputs()
def _single_file_example_inputs(self) -> dict[str, Any]:
return {
"raw": {"is_file": False, "data": media_data.BASE64_FILE},
"serialized": "https://github.com/gradio-app/gradio/raw/main/test/test_files/sample_file.pdf",
}
def _multiple_file_example_inputs(self) -> dict[str, Any]:
return {
"raw": [{"is_file": False, "data": media_data.BASE64_FILE}],
"serialized": [
"https://github.com/gradio-app/gradio/raw/main/test/test_files/sample_file.pdf"
],
}
def _serialize_single(
self,
x: str | FileData | None,
load_dir: str | Path = "",
allow_links: bool = False,
) -> FileData | None:
if x is None or isinstance(x, dict):
return x
if utils.is_http_url_like(x):
filename = x
size = None
else:
filename = str(Path(load_dir) / x)
size = Path(filename).stat().st_size
return {
"name": filename or None,
"data": None
if allow_links
else utils.encode_url_or_file_to_base64(filename),
"orig_name": Path(filename).name,
"size": size,
}
def _setup_stream(self, url, hf_token):
return utils.download_byte_stream(url, hf_token)
def _deserialize_single(
self,
x: str | FileData | None,
save_dir: str | None = None,
root_url: str | None = None,
hf_token: str | None = None,
) -> str | None:
if x is None:
return None
if isinstance(x, str):
file_name = utils.decode_base64_to_file(x, dir=save_dir).name
elif isinstance(x, dict):
if x.get("is_file"):
filepath = x.get("name")
if filepath is None:
raise ValueError(f"The 'name' field is missing in {x}")
if root_url is not None:
file_name = utils.download_tmp_copy_of_file(
root_url + "file=" + filepath,
hf_token=hf_token,
dir=save_dir,
)
else:
file_name = utils.create_tmp_copy_of_file(filepath, dir=save_dir)
elif x.get("is_stream"):
if not (x["name"] and root_url and save_dir):
raise ValueError(
"name and root_url and save_dir must all be present"
)
if not self.stream or self.stream_name != x["name"]:
self.stream = self._setup_stream(
root_url + "stream/" + x["name"], hf_token=hf_token
)
self.stream_name = x["name"]
chunk = next(self.stream)
path = Path(save_dir or tempfile.gettempdir()) / secrets.token_hex(20)
path.mkdir(parents=True, exist_ok=True)
path = path / x.get("orig_name", "output")
path.write_bytes(chunk)
file_name = str(path)
else:
data = x.get("data")
if data is None:
raise ValueError(f"The 'data' field is missing in {x}")
file_name = utils.decode_base64_to_file(data, dir=save_dir).name
else:
raise ValueError(
f"A FileSerializable component can only deserialize a string or a dict, not a {type(x)}: {x}"
)
return file_name
def serialize(
self,
x: str | FileData | None | list[str | FileData | None],
load_dir: str | Path = "",
allow_links: bool = False,
) -> FileData | None | list[FileData | None]:
"""
Convert from human-friendly version of a file (string filepath) to a
serialized representation (base64)
Parameters:
x: String path to file to serialize
load_dir: Path to directory containing x
allow_links: Will allow path returns instead of raw file content
"""
if x is None or x == "":
return None
if isinstance(x, list):
return [self._serialize_single(f, load_dir, allow_links) for f in x]
else:
return self._serialize_single(x, load_dir, allow_links)
def deserialize(
self,
x: str | FileData | None | list[str | FileData | None],
save_dir: Path | str | None = None,
root_url: str | None = None,
hf_token: str | None = None,
) -> str | None | list[str | None]:
"""
Convert from serialized representation of a file (base64) to a human-friendly
version (string filepath). Optionally, save the file to the directory specified by `save_dir`
Parameters:
x: Base64 representation of file to deserialize into a string filepath
save_dir: Path to directory to save the deserialized file to
root_url: If this component is loaded from an external Space, this is the URL of the Space.
hf_token: If this component is loaded from an external private Space, this is the access token for the Space
"""
if x is None:
return None
if isinstance(save_dir, Path):
save_dir = str(save_dir)
if isinstance(x, list):
return [
self._deserialize_single(
f, save_dir=save_dir, root_url=root_url, hf_token=hf_token
)
for f in x
]
else:
return self._deserialize_single(
x, save_dir=save_dir, root_url=root_url, hf_token=hf_token
)
class VideoSerializable(FileSerializable):
def serialized_info(self):
return {
"type": "string",
"description": "filepath on your computer (or URL) of video file",
}
def api_info(self) -> dict[str, dict | bool]:
return {"info": serializer_types["FileSerializable"], "serialized_info": True}
def example_inputs(self) -> dict[str, Any]:
return {
"raw": {"is_file": False, "data": media_data.BASE64_VIDEO},
"serialized": "https://github.com/gradio-app/gradio/raw/main/test/test_files/video_sample.mp4",
}
def serialize(
self, x: str | None, load_dir: str | Path = "", allow_links: bool = False
) -> tuple[FileData | None, None]:
return (super().serialize(x, load_dir, allow_links), None) # type: ignore
def deserialize(
self,
x: tuple[FileData | None, FileData | None] | None,
save_dir: Path | str | None = None,
root_url: str | None = None,
hf_token: str | None = None,
) -> str | tuple[str | None, str | None] | None:
"""
Convert from serialized representation of a file (base64) to a human-friendly
version (string filepath). Optionally, save the file to the directory specified by `save_dir`
"""
if isinstance(x, (tuple, list)):
if len(x) != 2:
raise ValueError(f"Expected tuple of length 2. Received: {x}")
x_as_list = [x[0], x[1]]
else:
raise ValueError(f"Expected tuple of length 2. Received: {x}")
deserialized_file = super().deserialize(x_as_list, save_dir, root_url, hf_token) # type: ignore
if isinstance(deserialized_file, list):
return deserialized_file[0] # ignore subtitles
class JSONSerializable(Serializable):
def serialized_info(self):
return {"type": "string", "description": "filepath to JSON file"}
def api_info(self) -> dict[str, dict | bool]:
return {"info": serializer_types["JSONSerializable"], "serialized_info": True}
def example_inputs(self) -> dict[str, Any]:
return {
"raw": {"a": 1, "b": 2},
"serialized": None,
}
def serialize(
self,
x: str | None,
load_dir: str | Path = "",
allow_links: bool = False,
) -> dict | list | None:
"""
Convert from a human-friendly version (string path to json file) to a
serialized representation (json string)
Parameters:
x: String path to json file to read to get json string
load_dir: Path to directory containing x
"""
if x is None or x == "":
return None
return utils.file_to_json(Path(load_dir) / x)
def deserialize(
self,
x: str | dict | list,
save_dir: str | Path | None = None,
root_url: str | None = None,
hf_token: str | None = None,
) -> str | None:
"""
Convert from serialized representation (json string) to a human-friendly
version (string path to json file). Optionally, save the file to the directory specified by `save_dir`
Parameters:
x: Json string
save_dir: Path to save the deserialized json file to
root_url: Ignored
hf_token: Ignored
"""
if x is None:
return None
return utils.dict_or_str_to_json_file(x, dir=save_dir).name
class GallerySerializable(Serializable):
def serialized_info(self):
return {
"type": "string",
"description": "path to directory with images and a file associating images with captions called captions.json",
}
def api_info(self) -> dict[str, dict | bool]:
return {
"info": serializer_types["GallerySerializable"],
"serialized_info": True,
}
def example_inputs(self) -> dict[str, Any]:
return {
"raw": [media_data.BASE64_IMAGE] * 2,
"serialized": [
"https://raw.githubusercontent.com/gradio-app/gradio/main/test/test_files/bus.png",
]
* 2,
}
def serialize(
self, x: str | None, load_dir: str | Path = "", allow_links: bool = False
) -> list[list[str | None]] | None:
if x is None or x == "":
return None
files = []
captions_file = Path(x) / "captions.json"
with captions_file.open("r") as captions_json:
captions = json.load(captions_json)
for file_name, caption in captions.items():
img = FileSerializable().serialize(file_name, allow_links=allow_links)
files.append([img, caption])
return files
def deserialize(
self,
x: list[list[str | None]] | None,
save_dir: str = "",
root_url: str | None = None,
hf_token: str | None = None,
) -> None | str:
if x is None:
return None
gallery_path = Path(save_dir) / str(uuid.uuid4())
gallery_path.mkdir(exist_ok=True, parents=True)
captions = {}
for img_data in x:
if isinstance(img_data, (list, tuple)):
img_data, caption = img_data
else:
caption = None
name = FileSerializable().deserialize(
img_data, gallery_path, root_url=root_url, hf_token=hf_token
)
captions[name] = caption
captions_file = gallery_path / "captions.json"
with captions_file.open("w") as captions_json:
json.dump(captions, captions_json)
return os.path.abspath(gallery_path)
SERIALIZER_MAPPING = {}
for cls in Serializable.__subclasses__():
SERIALIZER_MAPPING[cls.__name__] = cls
for subcls in cls.__subclasses__():
SERIALIZER_MAPPING[subcls.__name__] = subcls
SERIALIZER_MAPPING["Serializable"] = SimpleSerializable
SERIALIZER_MAPPING["File"] = FileSerializable
SERIALIZER_MAPPING["UploadButton"] = FileSerializable
COMPONENT_MAPPING: dict[str, type] = {
"textbox": StringSerializable,
"number": NumberSerializable,
"slider": NumberSerializable,
"checkbox": BooleanSerializable,
"checkboxgroup": ListStringSerializable,
"radio": StringSerializable,
"dropdown": SimpleSerializable,
"image": ImgSerializable,
"video": FileSerializable,
"audio": FileSerializable,
"file": FileSerializable,
"dataframe": JSONSerializable,
"timeseries": JSONSerializable,
"fileexplorer": JSONSerializable,
"state": SimpleSerializable,
"button": StringSerializable,
"uploadbutton": FileSerializable,
"colorpicker": StringSerializable,
"label": JSONSerializable,
"highlightedtext": JSONSerializable,
"json": JSONSerializable,
"html": StringSerializable,
"gallery": GallerySerializable,
"chatbot": JSONSerializable,
"model3d": FileSerializable,
"plot": JSONSerializable,
"barplot": JSONSerializable,
"lineplot": JSONSerializable,
"scatterplot": JSONSerializable,
"markdown": StringSerializable,
"code": StringSerializable,
"annotatedimage": JSONSerializable,
}
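
A hedged sketch of how the two lookup tables above might be used: pick a serializer class by component name or by class name, then round-trip a value. The file path and output directory are assumptions, not files shipped with this snippet.

serializer = COMPONENT_MAPPING["textbox"]()           # -> StringSerializable
print(serializer.api_info())                          # typing info for the component
print(serializer.example_inputs()["raw"])             # "Howdy!"

img_serializer = SERIALIZER_MAPPING["ImgSerializable"]()
b64 = img_serializer.serialize("bus.png", load_dir="test/test_files")  # path is an assumption
restored = img_serializer.deserialize(b64, save_dir="/tmp")            # writes a temp file and returns its path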

View File

@ -0,0 +1,193 @@
import asyncio
import os
import threading
from threading import Event
from typing import Optional
import discord
import gradio as gr
from discord import Permissions
from discord.ext import commands
from discord.utils import oauth_url
import gradio_client as grc
from gradio_client.utils import QueueError
event = Event()
DISCORD_TOKEN = os.getenv("DISCORD_TOKEN")
async def wait(job):
while not job.done():
await asyncio.sleep(0.2)
def get_client(session: Optional[str] = None) -> grc.Client:
client = grc.Client("<<app-src>>", hf_token=os.getenv("HF_TOKEN"))
if session:
client.session_hash = session
return client
def truncate_response(response: str) -> str:
ending = "...\nTruncating response to 2000 characters due to discord api limits."
if len(response) > 2000:
return response[: 2000 - len(ending)] + ending
else:
return response
intents = discord.Intents.default()
intents.message_content = True
bot = commands.Bot(command_prefix="/", intents=intents)
@bot.event
async def on_ready():
print(f"Logged in as {bot.user} (ID: {bot.user.id})")
synced = await bot.tree.sync()
print(f"Synced commands: {', '.join([s.name for s in synced])}.")
event.set()
print("------")
thread_to_client = {}
thread_to_user = {}
@bot.hybrid_command(
name="<<command-name>>",
description="Enter some text to chat with the bot! Like this: /<<command-name>> Hello, how are you?",
)
async def chat(ctx, prompt: str):
if ctx.author.id == bot.user.id:
return
try:
message = await ctx.send("Creating thread...")
thread = await message.create_thread(name=prompt)
loop = asyncio.get_running_loop()
client = await loop.run_in_executor(None, get_client, None)
job = client.submit(prompt, api_name="/<<api-name>>")
await wait(job)
try:
job.result()
response = job.outputs()[-1]
await thread.send(truncate_response(response))
thread_to_client[thread.id] = client
thread_to_user[thread.id] = ctx.author.id
except QueueError:
await thread.send(
"The gradio space powering this bot is really busy! Please try again later!"
)
except Exception as e:
print(f"{e}")
async def continue_chat(message):
"""Continues a given conversation based on chathistory"""
try:
client = thread_to_client[message.channel.id]
prompt = message.content
job = client.submit(prompt, api_name="/<<api-name>>")
await wait(job)
try:
job.result()
response = job.outputs()[-1]
await message.reply(truncate_response(response))
except QueueError:
await message.reply(
"The gradio space powering this bot is really busy! Please try again later!"
)
except Exception as e:
print(f"Error: {e}")
@bot.event
async def on_message(message):
"""Continue the chat"""
try:
if not message.author.bot:
if message.channel.id in thread_to_user:
if thread_to_user[message.channel.id] == message.author.id:
await continue_chat(message)
else:
await bot.process_commands(message)
except Exception as e:
print(f"Error: {e}")
# running in thread
def run_bot():
if not DISCORD_TOKEN:
print("DISCORD_TOKEN NOT SET")
event.set()
else:
bot.run(DISCORD_TOKEN)
threading.Thread(target=run_bot).start()
event.wait()
if not DISCORD_TOKEN:
welcome_message = """
## You have not specified a DISCORD_TOKEN, which means you have not created a bot account. Please follow these steps:
### 1. Go to https://discord.com/developers/applications and click 'New Application'
### 2. Give your bot a name 🤖
![](https://gradio-builds.s3.amazonaws.com/demo-files/discordbots/BotName.png)
### 3. In Settings > Bot, click the 'Reset Token' button to get a new token. Write it down and keep it safe 🔐
![](https://gradio-builds.s3.amazonaws.com/demo-files/discordbots/ResetToken.png)
### 4. Optionally make the bot public if you want anyone to be able to add it to their servers
### 5. Scroll down and enable 'Message Content Intent' under 'Privileged Gateway Intents'
![](https://gradio-builds.s3.amazonaws.com/demo-files/discordbots/MessageContentIntent.png)
### 6. Save your changes!
### 7. The token from step 3 is the DISCORD_TOKEN. Rerun the deploy_discord command, e.g. client.deploy_discord(discord_bot_token=DISCORD_TOKEN, ...), or add the token as a space secret manually.
"""
else:
permissions = Permissions(326417525824)
url = oauth_url(bot.user.id, permissions=permissions)
welcome_message = f"""
## Add this bot to your server by clicking this link:
{url}
## How to use it?
The bot can be triggered via `/<<command-name>>` followed by your text prompt.
This will create a thread with the bot's response to your text prompt.
You can reply in the thread (without `/<<command-name>>`) to continue the conversation.
In the thread, the bot will only reply to the original author of the command.
Note: Please make sure this bot's command does not have the same name as another command in your server.
Note: Bot commands do not work in DMs with the bot as of now.
"""
with gr.Blocks() as demo:
gr.Markdown(
f"""
# Discord bot of <<app-src>>
{welcome_message}
"""
)
demo.launch()
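
For reference, a stripped-down version of the submit/poll pattern the bot template relies on, runnable outside Discord. The Space name and api_name below are placeholders, not values from this repo.

import time
import gradio_client as grc

client = grc.Client("abidlabs/en2fr")                      # placeholder public Space
job = client.submit("Hello there!", api_name="/predict")   # api_name is an assumption
while not job.done():                                      # the bot awaits asyncio.sleep(0.2) instead
    time.sleep(0.2)
print(job.result())                                        # final output once the job finishes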

View File

@ -0,0 +1,199 @@
{
"SimpleSerializable": {
"type": {},
"description": "any valid value"
},
"StringSerializable": {
"type": "string"
},
"ListStringSerializable": {
"type": "array",
"items": {
"type": "string"
}
},
"BooleanSerializable": {
"type": "boolean"
},
"NumberSerializable": {
"type": "number"
},
"ImgSerializable": {
"type": "string",
"description": "base64 representation of an image"
},
"FileSerializable": {
"oneOf": [
{
"type": "string",
"description": "filepath on your computer (or URL) of file"
},
{
"type": "object",
"properties": {
"name": { "type": "string", "description": "name of file" },
"data": {
"type": "string",
"description": "base64 representation of file"
},
"size": {
"type": "integer",
"description": "size of image in bytes"
},
"is_file": {
"type": "boolean",
"description": "true if the file has been uploaded to the server"
},
"orig_name": {
"type": "string",
"description": "original name of the file"
}
},
"required": ["name", "data"]
},
{
"type": "array",
"items": {
"anyOf": [
{
"type": "string",
"description": "filepath on your computer (or URL) of file"
},
{
"type": "object",
"properties": {
"name": { "type": "string", "description": "name of file" },
"data": {
"type": "string",
"description": "base64 representation of file"
},
"size": {
"type": "integer",
"description": "size of image in bytes"
},
"is_file": {
"type": "boolean",
"description": "true if the file has been uploaded to the server"
},
"orig_name": {
"type": "string",
"description": "original name of the file"
}
},
"required": ["name", "data"]
}
]
}
}
]
},
"SingleFileSerializable": {
"oneOf": [
{
"type": "string",
"description": "filepath on your computer (or URL) of file"
},
{
"type": "object",
"properties": {
"name": { "type": "string", "description": "name of file" },
"data": {
"type": "string",
"description": "base64 representation of file"
},
"size": {
"type": "integer",
"description": "size of image in bytes"
},
"is_file": {
"type": "boolean",
"description": "true if the file has been uploaded to the server"
},
"orig_name": {
"type": "string",
"description": "original name of the file"
}
},
"required": ["name", "data"]
}
]
},
"MultipleFileSerializable": {
"type": "array",
"items": {
"anyOf": [
{
"type": "string",
"description": "filepath on your computer (or URL) of file"
},
{
"type": "object",
"properties": {
"name": { "type": "string", "description": "name of file" },
"data": {
"type": "string",
"description": "base64 representation of file"
},
"size": {
"type": "integer",
"description": "size of image in bytes"
},
"is_file": {
"type": "boolean",
"description": "true if the file has been uploaded to the server"
},
"orig_name": {
"type": "string",
"description": "original name of the file"
}
},
"required": ["name", "data"]
}
]
}
},
"JSONSerializable": {
"type": {},
"description": "any valid json"
},
"GallerySerializable": {
"type": "array",
"items": {
"type": "array",
"items": false,
"maxSize": 2,
"minSize": 2,
"prefixItems": [
{
"type": "object",
"properties": {
"name": { "type": "string", "description": "name of file" },
"data": {
"type": "string",
"description": "base64 representation of file"
},
"size": {
"type": "integer",
"description": "size of image in bytes"
},
"is_file": {
"type": "boolean",
"description": "true if the file has been uploaded to the server"
},
"orig_name": {
"type": "string",
"description": "original name of the file"
}
},
"required": ["name", "data"]
},
{
"oneOf": [
{ "type": "string", "description": "caption of image" },
{ "type": "null" }
]
}
]
}
}
}
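
The entries above are plain JSON Schema fragments, so they can be checked against example payloads with any JSON Schema validator. A sketch, assuming the third-party jsonschema package is installed and the file is reachable at gradio_client/types.json:

import json
from pathlib import Path
from jsonschema import validate  # third-party validator, not part of this repo

types = json.loads(Path("gradio_client/types.json").read_text())     # path is an assumption
payload = {"name": "sample_file.pdf", "data": "JVBERi0xLjQ..."}      # truncated base64, illustrative only
validate(instance=payload, schema=types["SingleFileSerializable"])   # raises ValidationError if invalid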

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,70 @@
[build-system]
requires = ["hatchling", "hatch-requirements-txt", "hatch-fancy-pypi-readme>=22.5.0"]
build-backend = "hatchling.build"
[project]
name = "gradio_client"
dynamic = ["version", "dependencies", "readme"]
description = "Python library for easily interacting with trained machine learning models"
license = "Apache-2.0"
requires-python = ">=3.10"
authors = [
{ name = "Abubakar Abid", email = "gradio-team@huggingface.co" },
{ name = "Ali Abid", email = "gradio-team@huggingface.co" },
{ name = "Ali Abdalla", email = "gradio-team@huggingface.co" },
{ name = "Dawood Khan", email = "gradio-team@huggingface.co" },
{ name = "Ahsen Khaliq", email = "gradio-team@huggingface.co" },
{ name = "Pete Allen", email = "gradio-team@huggingface.co" },
{ name = "Freddy Boulton", email = "gradio-team@huggingface.co" },
]
keywords = ["machine learning", "client", "API"]
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development :: User Interfaces',
]
[project.urls]
Homepage = "https://github.com/gradio-app/gradio"
[tool.hatch.version]
path = "gradio_client/package.json"
pattern = ".*\"version\":\\s*\"(?P<version>[^\"]+)\""
[tool.hatch.metadata.hooks.requirements_txt]
filename = "requirements.txt"
[tool.hatch.metadata.hooks.fancy-pypi-readme]
content-type = "text/markdown"
fragments = [
{ path = "README.md" },
]
[tool.hatch.build.targets.sdist]
include = [
"/gradio_client",
"/README.md",
"/requirements.txt",
]
[tool.ruff]
extend = "../../pyproject.toml"
[tool.ruff.lint.isort]
known-first-party = [
"gradio_client"
]
[tool.pytest.ini_options]
GRADIO_ANALYTICS_ENABLED = "False"
HF_HUB_DISABLE_TELEMETRY = "1"
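
The [tool.hatch.version] block above pulls the package version out of gradio_client/package.json with a regex rather than importing a Python module. A quick sanity check of that pattern (a sketch, not part of the build):

import json
import re

pattern = r'.*"version":\s*"(?P<version>[^"]+)"'                   # same pattern as above
text = json.dumps({"name": "gradio_client", "version": "1.10.4"})  # stand-in for package.json
print(re.search(pattern, text).group("version"))                   # -> 1.10.4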

Some files were not shown because too many files have changed in this diff.