xray918 commited on
Commit
0ad74ed
1 Parent(s): c776459

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .changeset/README.md +8 -0
  2. .changeset/afraid-papayas-own.md +5 -0
  3. .changeset/changeset.cjs +332 -0
  4. .changeset/config.json +11 -0
  5. .changeset/fix_changelogs.cjs +149 -0
  6. .changeset/silly-hairs-burn.md +5 -0
  7. .changeset/stupid-snakes-build.md +6 -0
  8. .config/.prettierignore +35 -0
  9. .config/.prettierrc.json +8 -0
  10. .config/basevite.config.ts +93 -0
  11. .config/copy_frontend.py +63 -0
  12. .config/demos.json +34 -0
  13. .config/eslint.config.js +164 -0
  14. .config/lite-builder/pyproject.toml +33 -0
  15. .config/lite-builder/src/lite_builder/__init__.py +0 -0
  16. .config/lite-builder/src/lite_builder/builder.py +5 -0
  17. .config/lite-builder/src/lite_builder/hooks.py +6 -0
  18. .config/playwright-ct.config.ts +41 -0
  19. .config/playwright-setup.js +179 -0
  20. .config/playwright.config.js +73 -0
  21. .config/playwright/index.html +12 -0
  22. .config/playwright/index.ts +2 -0
  23. .config/postcss.config.cjs +8 -0
  24. .config/setup_vite_tests.ts +8 -0
  25. .config/svelte.config.js +5 -0
  26. .config/tailwind.config.cjs +12 -0
  27. .config/vitest.config.ts +3 -0
  28. .devcontainer/devcontainer.json +41 -0
  29. .dockerignore +41 -0
  30. .editorconfig +8 -0
  31. .git-blame-ignore-revs +14 -0
  32. .gitattributes +35 -0
  33. .github/ISSUE_TEMPLATE/bug_report_template.yml +69 -0
  34. .github/ISSUE_TEMPLATE/config.yml +5 -0
  35. .github/ISSUE_TEMPLATE/feature_request.md +19 -0
  36. .github/PULL_REQUEST_TEMPLATE.md +18 -0
  37. .github/actions/changes/action.yml +78 -0
  38. .github/actions/install-all-deps/action.yml +92 -0
  39. .github/actions/install-frontend-deps/action.yml +51 -0
  40. .github/configs/semgrep_rules.yaml +110 -0
  41. .github/filters.json +51 -0
  42. .github/stale +17 -0
  43. .github/workflows/comment-queue.yml +37 -0
  44. .github/workflows/delete-stale-spaces.yml +42 -0
  45. .github/workflows/generate-changeset.yml +96 -0
  46. .github/workflows/npm-previews.yml +53 -0
  47. .github/workflows/previews-build.yml +90 -0
  48. .github/workflows/previews-deploy.yml +176 -0
  49. .github/workflows/publish.yml +80 -0
  50. .github/workflows/semgrep.yml +68 -0
.changeset/README.md ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ # Changesets
2
+
3
+ Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
4
+ with multi-package repos, or single-package repos to help you version and publish your code. You can
5
+ find the full documentation for it [in our repository](https://github.com/changesets/changesets)
6
+
7
+ We have a quick list of common questions to get you started engaging with this project in
8
+ [our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
.changeset/afraid-papayas-own.md ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ ---
2
+ "gradio": patch
3
+ ---
4
+
5
+ feat:Tweak message shown in Colab notebooks
.changeset/changeset.cjs ADDED
@@ -0,0 +1,332 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ const { getPackagesSync } = require("@manypkg/get-packages");
2
+ const dependents_graph = require("@changesets/get-dependents-graph");
3
+
4
+ const gh = require("@changesets/get-github-info");
5
+ const { existsSync, readFileSync, writeFileSync } = require("fs");
6
+ const { join } = require("path");
7
+
8
+ const { getInfo, getInfoFromPullRequest } = gh;
9
+ const pkg_data = getPackagesSync(process.cwd());
10
+ const { packages, rootDir } = pkg_data;
11
+ const dependents = dependents_graph.getDependentsGraph({
12
+ packages,
13
+ root: pkg_data.rootPackage
14
+ });
15
+
16
+ /**
17
+ * @typedef {{packageJson: {name: string, python?: boolean}, dir: string}} Package
18
+ */
19
+
20
+ /**
21
+ * @typedef {{summary: string, id: string, commit: string, releases: {name: string}}} Changeset
22
+ */
23
+
24
+ /**
25
+ *
26
+ * @param {string} package_name The name of the package to find the directories for
27
+ * @returns {string[]} The directories for the package
28
+ */
29
+ function find_packages_dirs(package_name) {
30
+ /** @type {string[]} */
31
+ let package_dirs = [];
32
+
33
+ /** @type {Package | undefined} */
34
+ const _package = packages.find((p) => p.packageJson.name === package_name);
35
+ if (!_package) throw new Error(`Package ${package_name} not found`);
36
+
37
+ package_dirs.push(_package.dir);
38
+ if (_package.packageJson.python) {
39
+ package_dirs.push(join(_package.dir, ".."));
40
+ }
41
+ return package_dirs;
42
+ }
43
+
44
+ let lines = {
45
+ _handled: []
46
+ };
47
+
48
+ const changelogFunctions = {
49
+ /**
50
+ *
51
+ * @param {Changeset[]} changesets The changesets that have been created
52
+ * @param {any} dependenciesUpdated The dependencies that have been updated
53
+ * @param {any} options The options passed to the changelog generator
54
+ * @returns {Promise<string>} The release line for the dependencies
55
+ */
56
+ getDependencyReleaseLine: async (
57
+ changesets,
58
+ dependenciesUpdated,
59
+ options
60
+ ) => {
61
+ if (!options.repo) {
62
+ throw new Error(
63
+ 'Please provide a repo to this changelog generator like this:\n"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]'
64
+ );
65
+ }
66
+ if (dependenciesUpdated.length === 0) return "";
67
+
68
+ const changesetLink = `- Updated dependencies [${(
69
+ await Promise.all(
70
+ changesets.map(async (cs) => {
71
+ if (cs.commit) {
72
+ let { links } = await getInfo({
73
+ repo: options.repo,
74
+ commit: cs.commit
75
+ });
76
+ return links.commit;
77
+ }
78
+ })
79
+ )
80
+ )
81
+ .filter((_) => _)
82
+ .join(", ")}]:`;
83
+
84
+ const updatedDepenenciesList = dependenciesUpdated.map(
85
+ /**
86
+ *
87
+ * @param {any} dependency The dependency that has been updated
88
+ * @returns {string} The formatted dependency
89
+ */
90
+ (dependency) => {
91
+ const updates = dependents.get(dependency.name);
92
+
93
+ if (updates && updates.length > 0) {
94
+ updates.forEach((update) => {
95
+ if (!lines[update]) {
96
+ lines[update] = {
97
+ dirs: find_packages_dirs(update),
98
+ current_changelog: "",
99
+ feat: [],
100
+ fix: [],
101
+ highlight: [],
102
+ previous_version: packages.find(
103
+ (p) => p.packageJson.name === update
104
+ ).packageJson.version,
105
+ dependencies: []
106
+ };
107
+
108
+ const changelog_path = join(
109
+ //@ts-ignore
110
+ lines[update].dirs[1] || lines[update].dirs[0],
111
+ "CHANGELOG.md"
112
+ );
113
+
114
+ if (existsSync(changelog_path)) {
115
+ //@ts-ignore
116
+ lines[update].current_changelog = readFileSync(
117
+ changelog_path,
118
+ "utf-8"
119
+ )
120
+ .replace(`# ${update}`, "")
121
+ .trim();
122
+ }
123
+ }
124
+ lines[update].dependencies.push(
125
+ ` - ${dependency.name}@${dependency.newVersion}`
126
+ );
127
+ });
128
+ }
129
+
130
+ return ` - ${dependency.name}@${dependency.newVersion}`;
131
+ }
132
+ );
133
+
134
+ writeFileSync(
135
+ join(rootDir, ".changeset", "_changelog.json"),
136
+ JSON.stringify(lines, null, 2)
137
+ );
138
+
139
+ return [changesetLink, ...updatedDepenenciesList].join("\n");
140
+ },
141
+ /**
142
+ *
143
+ * @param {{summary: string, id: string, commit: string, releases: {name: string}[]}} changeset The changeset that has been created
144
+ * @param {any} type The type of changeset
145
+ * @param {any} options The options passed to the changelog generator
146
+ * @returns {Promise<string>} The release line for the changeset
147
+ */
148
+ getReleaseLine: async (changeset, type, options) => {
149
+ if (!options || !options.repo) {
150
+ throw new Error(
151
+ 'Please provide a repo to this changelog generator like this:\n"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]'
152
+ );
153
+ }
154
+
155
+ let prFromSummary;
156
+ let commitFromSummary;
157
+ /**
158
+ * @type {string[]}
159
+ */
160
+ let usersFromSummary = [];
161
+
162
+ const replacedChangelog = changeset.summary
163
+ .replace(/^\s*(?:pr|pull|pull\s+request):\s*#?(\d+)/im, (_, pr) => {
164
+ let num = Number(pr);
165
+ if (!isNaN(num)) prFromSummary = num;
166
+ return "";
167
+ })
168
+ .replace(/^\s*commit:\s*([^\s]+)/im, (_, commit) => {
169
+ commitFromSummary = commit;
170
+ return "";
171
+ })
172
+ .replace(/^\s*(?:author|user):\s*@?([^\s]+)/gim, (_, user) => {
173
+ usersFromSummary.push(user);
174
+ return "";
175
+ })
176
+ .trim();
177
+
178
+ const [firstLine, ...futureLines] = replacedChangelog
179
+ .split("\n")
180
+ .map((l) => l.trimRight());
181
+
182
+ const links = await (async () => {
183
+ if (prFromSummary !== undefined) {
184
+ let { links } = await getInfoFromPullRequest({
185
+ repo: options.repo,
186
+ pull: prFromSummary
187
+ });
188
+ if (commitFromSummary) {
189
+ links = {
190
+ ...links,
191
+ commit: `[\`${commitFromSummary}\`](https://github.com/${options.repo}/commit/${commitFromSummary})`
192
+ };
193
+ }
194
+ return links;
195
+ }
196
+ const commitToFetchFrom = commitFromSummary || changeset.commit;
197
+ if (commitToFetchFrom) {
198
+ let { links } = await getInfo({
199
+ repo: options.repo,
200
+ commit: commitToFetchFrom
201
+ });
202
+ return links;
203
+ }
204
+ return {
205
+ commit: null,
206
+ pull: null,
207
+ user: null
208
+ };
209
+ })();
210
+
211
+ const user_link = /\[(@[^]+)\]/.exec(links.user);
212
+ const users =
213
+ usersFromSummary && usersFromSummary.length
214
+ ? usersFromSummary
215
+ .map((userFromSummary) => `@${userFromSummary}`)
216
+ .join(", ")
217
+ : user_link
218
+ ? user_link[1]
219
+ : links.user;
220
+
221
+ const prefix = [
222
+ links.pull === null ? "" : `${links.pull}`,
223
+ links.commit === null ? "" : `${links.commit}`
224
+ ]
225
+ .join(" ")
226
+ .trim();
227
+
228
+ const suffix = users === null ? "" : ` Thanks ${users}!`;
229
+
230
+ /**
231
+ * @typedef {{[key: string]: string[] | {dirs: string[], current_changelog: string, feat: {summary: string}[], fix: {summary: string}[], highlight: {summary: string}[]}}} ChangesetMeta
232
+ */
233
+
234
+ /**
235
+ * @type { ChangesetMeta & { _handled: string[] } }}
236
+ */
237
+
238
+ if (lines._handled.includes(changeset.id)) {
239
+ return "done";
240
+ }
241
+ lines._handled.push(changeset.id);
242
+
243
+ changeset.releases.forEach((release) => {
244
+ if (!lines[release.name]) {
245
+ lines[release.name] = {
246
+ dirs: find_packages_dirs(release.name),
247
+ current_changelog: "",
248
+ feat: [],
249
+ fix: [],
250
+ highlight: [],
251
+ previous_version: packages.find(
252
+ (p) => p.packageJson.name === release.name
253
+ ).packageJson.version,
254
+ dependencies: []
255
+ };
256
+ }
257
+
258
+ const changelog_path = join(
259
+ //@ts-ignore
260
+ lines[release.name].dirs[1] || lines[release.name].dirs[0],
261
+ "CHANGELOG.md"
262
+ );
263
+
264
+ if (existsSync(changelog_path)) {
265
+ //@ts-ignore
266
+ lines[release.name].current_changelog = readFileSync(
267
+ changelog_path,
268
+ "utf-8"
269
+ )
270
+ .replace(`# ${release.name}`, "")
271
+ .trim();
272
+ }
273
+
274
+ const [, _type, summary] = changeset.summary
275
+ .trim()
276
+ .match(/^(feat|fix|highlight)\s*:\s*([^]*)/im) || [
277
+ ,
278
+ "feat",
279
+ changeset.summary
280
+ ];
281
+
282
+ let formatted_summary = "";
283
+
284
+ if (_type === "highlight") {
285
+ const [heading, ...rest] = summary.trim().split("\n");
286
+ const _heading = `${heading} ${prefix ? `(${prefix})` : ""}`;
287
+ const _rest = rest.concat(["", suffix]);
288
+
289
+ formatted_summary = `${_heading}\n${_rest.join("\n")}`;
290
+ } else {
291
+ formatted_summary = handle_line(summary, prefix, suffix);
292
+ }
293
+
294
+ //@ts-ignore
295
+ lines[release.name][_type].push({
296
+ summary: formatted_summary
297
+ });
298
+ });
299
+
300
+ writeFileSync(
301
+ join(rootDir, ".changeset", "_changelog.json"),
302
+ JSON.stringify(lines, null, 2)
303
+ );
304
+
305
+ return `\n\n-${prefix ? `${prefix} -` : ""} ${firstLine}\n${futureLines
306
+ .map((l) => ` ${l}`)
307
+ .join("\n")}`;
308
+ }
309
+ };
310
+
311
+ /**
312
+ * @param {string} str The changelog entry
313
+ * @param {string} prefix The prefix to add to the first line
314
+ * @param {string} suffix The suffix to add to the last line
315
+ * @returns {string} The formatted changelog entry
316
+ */
317
+ function handle_line(str, prefix, suffix) {
318
+ const [_s, ...lines] = str.split("\n").filter(Boolean);
319
+
320
+ const desc = `${prefix ? `${prefix} -` : ""} ${_s.replace(
321
+ /[\s\.]$/,
322
+ ""
323
+ )}. ${suffix}`;
324
+
325
+ if (_s.length === 1) {
326
+ return desc;
327
+ }
328
+
329
+ return [desc, ...lines.map((l) => ` ${l}`)].join("/n");
330
+ }
331
+
332
+ module.exports = changelogFunctions;
.changeset/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "$schema": "https://unpkg.com/@changesets/[email protected]/schema.json",
3
+ "changelog": ["./changeset.cjs", { "repo": "gradio-app/gradio" }],
4
+ "commit": false,
5
+ "fixed": [],
6
+ "linked": [],
7
+ "access": "public",
8
+ "baseBranch": "main",
9
+ "updateInternalDependencies": "patch",
10
+ "ignore": ["@self/spaces-test", "@self/cdn-test"]
11
+ }
.changeset/fix_changelogs.cjs ADDED
@@ -0,0 +1,149 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ const { join } = require("path");
2
+ const { readFileSync, existsSync, writeFileSync, unlinkSync } = require("fs");
3
+ const { getPackagesSync } = require("@manypkg/get-packages");
4
+
5
+ const RE_PKG_NAME = /^[\w-]+\b/;
6
+ const pkg_meta = getPackagesSync(process.cwd());
7
+
8
+ /**
9
+ * @typedef {{dirs: string[], highlight: {summary: string}[], feat: {summary: string}[], fix: {summary: string}[], current_changelog: string}} ChangesetMeta
10
+ */
11
+
12
+ /**
13
+ * @typedef {{[key: string]: ChangesetMeta}} ChangesetMetaCollection
14
+ */
15
+
16
+ function run() {
17
+ if (!existsSync(join(pkg_meta.rootDir, ".changeset", "_changelog.json"))) {
18
+ console.warn("No changesets to process");
19
+ return;
20
+ }
21
+
22
+ /**
23
+ * @type { ChangesetMetaCollection & { _handled: string[] } }}
24
+ */
25
+ const { _handled, ...packages } = JSON.parse(
26
+ readFileSync(
27
+ join(pkg_meta.rootDir, ".changeset", "_changelog.json"),
28
+ "utf-8"
29
+ )
30
+ );
31
+
32
+ /**
33
+ * @typedef { {packageJson: {name: string, version: string, python: boolean}, dir: string} } PackageMeta
34
+ */
35
+
36
+ /**
37
+ * @type { {[key:string]: PackageMeta} }
38
+ */
39
+ const all_packages = pkg_meta.packages.reduce((acc, pkg) => {
40
+ acc[pkg.packageJson.name] = /**@type {PackageMeta} */ (
41
+ /** @type {unknown} */ (pkg)
42
+ );
43
+ return acc;
44
+ }, /** @type {{[key:string] : PackageMeta}} */ ({}));
45
+
46
+ for (const pkg_name in packages) {
47
+ const { dirs, highlight, feat, fix, current_changelog, dependencies } =
48
+ /**@type {ChangesetMeta} */ (packages[pkg_name]);
49
+
50
+ if (pkg_name === "@gradio/lite") {
51
+ const target = all_packages.gradio.packageJson.version.split(".");
52
+
53
+ const current_version = packages[pkg_name].previous_version.split(".");
54
+
55
+ if (!packages.gradio) {
56
+ const patch = parseInt(current_version[2]) + 1;
57
+ const new_version = [target[0], target[1], patch];
58
+ all_packages[pkg_name].packageJson.version = new_version.join(".");
59
+ } else {
60
+ if (parseInt(target[1]) > parseInt(current_version[1])) {
61
+ all_packages[pkg_name].packageJson.version = target.join(".");
62
+ } else if (parseInt(target[1]) === parseInt(current_version[1])) {
63
+ const patch = parseInt(current_version[2]) + 1;
64
+ const new_version = [target[0], target[1], patch];
65
+ all_packages[pkg_name].packageJson.version = new_version.join(".");
66
+ }
67
+ }
68
+
69
+ writeFileSync(
70
+ join(all_packages[pkg_name].dir, "package.json"),
71
+ JSON.stringify(all_packages[pkg_name].packageJson, null, "\t") + "\n"
72
+ );
73
+ }
74
+
75
+ const { version, python } = all_packages[pkg_name].packageJson;
76
+
77
+ const highlights = highlight?.map((h) => `${h.summary}`) || [];
78
+ const features = feat?.map((f) => `- ${f.summary}`) || [];
79
+ const fixes = fix?.map((f) => `- ${f.summary}`) || [];
80
+ const deps = Array.from(new Set(dependencies?.map((d) => d.trim()))) || [];
81
+
82
+ const release_notes = /** @type {[string[], string][]} */ ([
83
+ [highlights, "### Highlights"],
84
+ [features, "### Features"],
85
+ [fixes, "### Fixes"],
86
+ [deps, "### Dependency updates"]
87
+ ])
88
+ .filter(([s], i) => s.length > 0)
89
+ .map(([lines, title]) => {
90
+ if (title === "### Highlights") {
91
+ return `${title}\n\n${lines.join("\n\n")}`;
92
+ }
93
+
94
+ return `${title}\n\n${lines.join("\n")}`;
95
+ })
96
+ .join("\n\n");
97
+
98
+ const new_changelog = `# ${pkg_name}
99
+
100
+ ## ${version}
101
+
102
+ ${release_notes}
103
+
104
+ ${current_changelog.replace(`# ${pkg_name}`, "").trim()}
105
+ `.trim();
106
+
107
+ dirs.forEach((dir) => {
108
+ writeFileSync(join(dir, "CHANGELOG.md"), new_changelog);
109
+ });
110
+
111
+ if (python) {
112
+ bump_local_dependents(pkg_name, version);
113
+ }
114
+ }
115
+
116
+ unlinkSync(join(pkg_meta.rootDir, ".changeset", "_changelog.json"));
117
+
118
+ /**
119
+ * @param {string} pkg_to_bump The name of the package to bump
120
+ * @param {string} version The version to bump to
121
+ * @returns {void}
122
+ * */
123
+ function bump_local_dependents(pkg_to_bump, version) {
124
+ for (const pkg_name in all_packages) {
125
+ const {
126
+ dir,
127
+ packageJson: { python }
128
+ } = all_packages[pkg_name];
129
+
130
+ if (!python) continue;
131
+
132
+ const requirements_path = join(dir, "..", "requirements.txt");
133
+ const requirements = readFileSync(requirements_path, "utf-8").split("\n");
134
+
135
+ const pkg_index = requirements.findIndex((line) => {
136
+ const m = line.trim().match(RE_PKG_NAME);
137
+ if (!m) return false;
138
+ return m[0] === pkg_to_bump;
139
+ });
140
+
141
+ if (pkg_index !== -1) {
142
+ requirements[pkg_index] = `${pkg_to_bump}==${version}`;
143
+ writeFileSync(requirements_path, requirements.join("\n"));
144
+ }
145
+ }
146
+ }
147
+ }
148
+
149
+ run();
.changeset/silly-hairs-burn.md ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ ---
2
+ "gradio": patch
3
+ ---
4
+
5
+ feat:Fix streaming Audio/Video Output
.changeset/stupid-snakes-build.md ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ ---
2
+ "@gradio/chatbot": patch
3
+ "gradio": patch
4
+ ---
5
+
6
+ fix:Fix Audio in Chatbot bug
.config/.prettierignore ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ **/*.md
2
+ **/js/app/public/**
3
+ **/pnpm-workspace.yaml
4
+ **/js/app/dist/**
5
+ **/js/wasm/dist/**
6
+ **/js/preview/dist/**
7
+ **/client/js/dist/**
8
+ **/js/*/dist/**
9
+ **/pnpm-lock.yaml
10
+ **/js/plot/src/Plot.svelte
11
+ **/.svelte-kit/**
12
+ **/demo/**
13
+ **/gradio/**
14
+ **/.pnpm-store/**
15
+ **/.venv/**
16
+
17
+ /guides/**
18
+ **/.mypy_cache/**
19
+ !test-strategy.md
20
+ **/js/_space-test/**
21
+ ../js/lite/src/theme.css
22
+ ../js/storybook/theme.css
23
+ **/gradio_cached_examples/**
24
+ **/storybook-static/**
25
+ **/.vscode/**
26
+ sweep.yaml
27
+ **/.vercel/**
28
+ **/build/**
29
+ **/src/lib/json/**/*
30
+ **/playwright/.cache/**/*
31
+ **/theme/src/pollen.css
32
+ **/venv/**
33
+ ../js/app/src/api_docs/CodeSnippet.svelte
34
+ ../js/app/src/api_docs/RecordingSnippet.svelte
35
+ ../.changeset/pre.json
.config/.prettierrc.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "useTabs": true,
3
+ "singleQuote": false,
4
+ "trailingComma": "none",
5
+ "printWidth": 80,
6
+ "plugins": ["prettier-plugin-svelte"],
7
+ "overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }]
8
+ }
.config/basevite.config.ts ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { defineConfig } from "vite";
2
+ import { svelte } from "@sveltejs/vite-plugin-svelte";
3
+ import sveltePreprocess from "svelte-preprocess";
4
+ // @ts-ignore
5
+ import custom_media from "postcss-custom-media";
6
+ import global_data from "@csstools/postcss-global-data";
7
+ // @ts-ignore
8
+ import prefixer from "postcss-prefix-selector";
9
+ import { readFileSync } from "fs";
10
+ import { join } from "path";
11
+ import { fileURLToPath } from "url";
12
+
13
+ const __dirname = fileURLToPath(new URL(".", import.meta.url));
14
+ const version_path = join(__dirname, "..", "gradio", "package.json");
15
+ const theme_token_path = join(
16
+ __dirname,
17
+ "..",
18
+ "js",
19
+ "theme",
20
+ "src",
21
+ "tokens.css"
22
+ );
23
+
24
+ const version = JSON.parse(readFileSync(version_path, { encoding: "utf-8" }))
25
+ .version.trim()
26
+ .replace(/\./g, "-");
27
+
28
+ //@ts-ignore
29
+ export default defineConfig(({ mode }) => {
30
+ const production = mode === "production";
31
+
32
+ return {
33
+ server: {
34
+ port: 9876
35
+ },
36
+
37
+ build: {
38
+ sourcemap: false,
39
+ target: "esnext",
40
+ minify: production,
41
+ rollupOptions: {
42
+ external: ["virtual:component-loader"]
43
+ }
44
+ },
45
+ define: {
46
+ BUILD_MODE: production ? JSON.stringify("prod") : JSON.stringify("dev"),
47
+ BACKEND_URL: production
48
+ ? JSON.stringify("")
49
+ : JSON.stringify("http://localhost:7860/"),
50
+ GRADIO_VERSION: JSON.stringify(version)
51
+ },
52
+ css: {
53
+ postcss: {
54
+ plugins: [
55
+ prefixer({
56
+ prefix: `.gradio-container-${version}`,
57
+ // @ts-ignore
58
+ transform(prefix, selector, prefixedSelector, fileName) {
59
+ if (selector.indexOf("gradio-container") > -1) {
60
+ return prefix;
61
+ } else if (
62
+ selector.indexOf(":root") > -1 ||
63
+ selector.indexOf("dark") > -1 ||
64
+ fileName.indexOf(".svelte") > -1
65
+ ) {
66
+ return selector;
67
+ }
68
+ return prefixedSelector;
69
+ }
70
+ }),
71
+ custom_media()
72
+ ]
73
+ }
74
+ },
75
+ plugins: [
76
+ svelte({
77
+ inspector: false,
78
+ compilerOptions: {
79
+ dev: !production
80
+ },
81
+ hot: !process.env.VITEST && !production,
82
+ preprocess: sveltePreprocess({
83
+ postcss: {
84
+ plugins: [
85
+ global_data({ files: [theme_token_path] }),
86
+ custom_media()
87
+ ]
88
+ }
89
+ })
90
+ })
91
+ ]
92
+ };
93
+ });
.config/copy_frontend.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import shutil
4
+ import pathlib
5
+ from typing import Any
6
+
7
+ from hatchling.builders.hooks.plugin.interface import BuildHookInterface
8
+
9
+
10
+ def copy_js_code(root: str | pathlib.Path):
11
+ NOT_COMPONENT = [
12
+ "app",
13
+ "node_modules",
14
+ "storybook",
15
+ "playwright-report",
16
+ "workbench",
17
+ "tooltils",
18
+ "component-test",
19
+ "core",
20
+ "spa",
21
+ ]
22
+ for entry in (pathlib.Path(root) / "js").iterdir():
23
+ if (
24
+ entry.is_dir()
25
+ and not str(entry.name).startswith("_")
26
+ and not str(entry.name) in NOT_COMPONENT
27
+ ):
28
+
29
+ def ignore(s, names):
30
+ ignored = []
31
+ for n in names:
32
+ if (
33
+ n.startswith("CHANGELOG")
34
+ or n.startswith("README.md")
35
+ or n.startswith("node_modules")
36
+ or ".test." in n
37
+ or ".stories." in n
38
+ or ".spec." in n
39
+ ):
40
+ ignored.append(n)
41
+ return ignored
42
+
43
+ shutil.copytree(
44
+ str(entry),
45
+ str(pathlib.Path("gradio") / "_frontend_code" / entry.name),
46
+ ignore=ignore,
47
+ dirs_exist_ok=True,
48
+ )
49
+ shutil.copytree(
50
+ str(pathlib.Path(root) / "client" / "js"),
51
+ str(pathlib.Path("gradio") / "_frontend_code" / "client"),
52
+ ignore=lambda d, names: ["node_modules", "test"],
53
+ dirs_exist_ok=True,
54
+ )
55
+
56
+
57
+ class BuildHook(BuildHookInterface):
58
+ def initialize(self, version: str, build_data: dict[str, Any]) -> None:
59
+ copy_js_code(self.root)
60
+
61
+
62
+ if __name__ == "__main__":
63
+ copy_js_code(pathlib.Path("..").resolve())
.config/demos.json ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ "audio_debugger",
3
+ "blocks_essay",
4
+ "blocks_group",
5
+ "blocks_js_methods",
6
+ "blocks_layout",
7
+ "blocks_multiple_event_triggers",
8
+ "blocks_update",
9
+ "calculator",
10
+ "cancel_events",
11
+ "chatbot_multimodal",
12
+ "chatinterface_streaming_echo",
13
+ "clear_components",
14
+ "code",
15
+ "fake_gan",
16
+ "fake_diffusion_with_gif",
17
+ "file_explorer_component_events",
18
+ "image_mod_default_image",
19
+ "image_editor_events",
20
+ "image_segmentation",
21
+ "interface_random_slider",
22
+ "kitchen_sink",
23
+ "kitchen_sink_random",
24
+ "matrix_transpose",
25
+ "mini_leaderboard",
26
+ "model3D",
27
+ "native_plots",
28
+ "reverse_audio",
29
+ "stt_or_tts",
30
+ "stream_audio",
31
+ "stream_frames",
32
+ "video_component",
33
+ "zip_files"
34
+ ]
.config/eslint.config.js ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import globals from "globals";
2
+ import ts_plugin from "@typescript-eslint/eslint-plugin";
3
+ import js_plugin from "@eslint/js";
4
+ import jsdoc from "eslint-plugin-jsdoc";
5
+
6
+ import typescriptParser from "@typescript-eslint/parser";
7
+ import sveltePlugin from "eslint-plugin-svelte";
8
+ import svelteParser from "svelte-eslint-parser";
9
+
10
+ const ts_rules_disabled = Object.fromEntries(
11
+ Object.keys(ts_plugin.rules).map((rule) => [
12
+ `@typescript-eslint/${rule}`,
13
+ "off"
14
+ ])
15
+ );
16
+ const js_rules_disabled = Object.fromEntries(
17
+ Object.keys(js_plugin.configs.all.rules).map((rule) => [rule, "off"])
18
+ );
19
+
20
+ const jsdoc_rules_disabled = Object.fromEntries(
21
+ Object.keys(jsdoc.configs.recommended.rules).map((rule) => [
22
+ `jsdoc/${rule}`,
23
+ "off"
24
+ ])
25
+ );
26
+
27
+ const js_rules = {
28
+ ...js_rules_disabled,
29
+ "no-console": ["error", { allow: ["warn", "error", "debug", "info"] }],
30
+ "no-constant-condition": "error",
31
+ "no-dupe-args": "error",
32
+ "no-extra-boolean-cast": "error",
33
+ "no-unexpected-multiline": "error",
34
+ "no-unreachable": "error",
35
+ "array-callback-return": "error",
36
+ complexity: "error",
37
+ "no-else-return": "error",
38
+ "no-useless-return": "error",
39
+ "no-undef": "error"
40
+ };
41
+
42
+ const ts_rules = {
43
+ ...ts_rules_disabled,
44
+ "@typescript-eslint/adjacent-overload-signatures": "error",
45
+ "@typescript-eslint/explicit-function-return-type": [
46
+ "error",
47
+ { allowExpressions: true }
48
+ ],
49
+ "@typescript-eslint/consistent-type-exports": "error",
50
+ "@typescript-eslint/ban-types": "error",
51
+ "@typescript-eslint/array-type": "error",
52
+ "@typescript-eslint/no-inferrable-types": "error"
53
+ };
54
+
55
+ const jsdoc_rules = {
56
+ ...jsdoc_rules_disabled,
57
+ "jsdoc/require-param-description": "error",
58
+ "jsdoc/require-returns-description": "error"
59
+ };
60
+
61
+ const { browser, es2021, node } = globals;
62
+
63
+ export default [
64
+ {
65
+ ignores: [
66
+ "**/.svelte-kit/**/*",
67
+ "**/node_modules/**",
68
+ "**/dist/**",
69
+ "**/.config/*",
70
+ "**/*.spec.ts",
71
+ "**/*.test.ts",
72
+ "**/*.node-test.ts",
73
+ "js/spa/test/**/*",
74
+ "**/*vite.config.ts",
75
+ "**/_website/**/*",
76
+ "**/app/**/*",
77
+ "**/_spaces-test/**/*",
78
+ "**/preview/test/**/*",
79
+ "**/component-test/**/*",
80
+ "**/js/wasm/src/webworker/**/*"
81
+ ]
82
+ },
83
+ {
84
+ files: ["**/*.js", "**/*.cjs"],
85
+ languageOptions: {
86
+ globals: {
87
+ ...browser,
88
+ ...es2021,
89
+ ...node
90
+ }
91
+ },
92
+
93
+ plugins: {
94
+ "eslint:recommended": js_plugin,
95
+ jsdoc
96
+ },
97
+ rules: { ...js_rules, ...jsdoc_rules }
98
+ },
99
+
100
+ {
101
+ files: ["**/*.ts"],
102
+ languageOptions: {
103
+ parser: typescriptParser,
104
+ parserOptions: {
105
+ project: "./tsconfig.json",
106
+ extraFileExtensions: [".svelte"]
107
+ },
108
+ globals: {
109
+ ...browser,
110
+ ...es2021,
111
+ ...node
112
+ }
113
+ },
114
+
115
+ plugins: {
116
+ "@typescript-eslint": ts_plugin,
117
+ "eslint:recommended": js_plugin,
118
+ jsdoc
119
+ },
120
+ rules: {
121
+ ...ts_rules,
122
+ ...js_rules,
123
+ ...jsdoc_rules,
124
+ "no-undef": "off"
125
+ }
126
+ },
127
+ {
128
+ files: ["**/client/js/**"],
129
+ languageOptions: {
130
+ parserOptions: {
131
+ project: "./client/js/tsconfig.json"
132
+ }
133
+ }
134
+ },
135
+ {
136
+ files: ["**/*.svelte"],
137
+ languageOptions: {
138
+ parser: svelteParser,
139
+ parserOptions: {
140
+ parser: typescriptParser,
141
+ project: "./tsconfig.json",
142
+ extraFileExtensions: [".svelte"]
143
+ },
144
+ globals: {
145
+ ...browser,
146
+ ...es2021
147
+ }
148
+ },
149
+ plugins: {
150
+ svelte: sveltePlugin,
151
+ "@typescript-eslint": ts_plugin,
152
+ "eslint:recommended": js_plugin,
153
+ jsdoc
154
+ },
155
+ rules: {
156
+ ...ts_rules,
157
+ ...js_rules,
158
+ ...jsdoc_rules,
159
+ ...sveltePlugin.configs.recommended.rules,
160
+ "svelte/no-at-html-tags": "off",
161
+ "no-undef": "off"
162
+ }
163
+ }
164
+ ];
.config/lite-builder/pyproject.toml ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [build-system]
2
+ requires = ["hatchling",]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "lite-builder"
7
+ description = "Python library for easily interacting with trained machine learning models"
8
+ license = "Apache-2.0"
9
+ version = "0.0.2"
10
+ requires-python = ">=3.8"
11
+ authors = [
12
+ { name = "Abubakar Abid", email = "[email protected]" },
13
+ { name = "Ali Abid", email = "[email protected]" },
14
+ { name = "Ali Abdalla", email = "[email protected]" },
15
+ { name = "Dawood Khan", email = "[email protected]" },
16
+ { name = "Ahsen Khaliq", email = "[email protected]" },
17
+ { name = "Pete Allen", email = "[email protected]" },
18
+ { name = "Ömer Faruk Özdemir", email = "[email protected]" },
19
+ { name = "Freddy A Boulton", email = "[email protected]" },
20
+ { name = "Hannah Blair", email = "[email protected]" },
21
+ ]
22
+ keywords = ["machine learning", "reproducibility", "visualization"]
23
+
24
+ classifiers = [
25
+ 'Development Status :: 5 - Production/Stable',
26
+ ]
27
+
28
+ [tool.hatch.build]
29
+ sources = ["src"]
30
+ only-packages = true
31
+
32
+ [project.entry-points.hatch]
33
+ lite_builder = "lite_builder.hooks"
.config/lite-builder/src/lite_builder/__init__.py ADDED
File without changes
.config/lite-builder/src/lite_builder/builder.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ from hatchling.builders.wheel import WheelBuilder
2
+
3
+
4
+ class LiteBuilder(WheelBuilder):
5
+ PLUGIN_NAME = 'lite'
.config/lite-builder/src/lite_builder/hooks.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from hatchling.plugin import hookimpl
2
+ from .builder import LiteBuilder
3
+
4
+ @hookimpl
5
+ def hatch_register_builder():
6
+ return LiteBuilder
.config/playwright-ct.config.ts ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { defineConfig, devices } from "@playwright/experimental-ct-svelte";
2
+ import config from "./basevite.config";
3
+
4
+ /**
5
+ * See https://playwright.dev/docs/test-configuration.
6
+ */
7
+ export default defineConfig({
8
+ testDir: "../",
9
+ /* The base directory, relative to the config file, for snapshot files created with toMatchSnapshot and toHaveScreenshot. */
10
+ snapshotDir: "./__snapshots__",
11
+ /* Maximum time one test can run for. */
12
+ timeout: 10 * 1000,
13
+ /* Run tests in files in parallel */
14
+ fullyParallel: true,
15
+ /* Fail the build on CI if you accidentally left test.only in the source code. */
16
+ forbidOnly: !!process.env.CI,
17
+ /* Retry on CI only */
18
+ retries: process.env.CI ? 2 : 0,
19
+ /* Opt out of parallel tests on CI. */
20
+ workers: process.env.CI ? 1 : undefined,
21
+ /* Reporter to use. See https://playwright.dev/docs/test-reporters */
22
+ reporter: "html",
23
+ /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
24
+ use: {
25
+ /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
26
+ trace: "on-first-retry",
27
+
28
+ /* Port to use for Playwright component endpoint. */
29
+ ctPort: 3100,
30
+ ctViteConfig: config({ mode: "development", command: "build" })
31
+ },
32
+ testMatch: "*.component.spec.ts",
33
+
34
+ /* Configure projects for major browsers */
35
+ projects: [
36
+ {
37
+ name: "chromium",
38
+ use: { ...devices["Desktop Chrome"] }
39
+ }
40
+ ]
41
+ });
.config/playwright-setup.js ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { spawn } from "node:child_process";
2
+ import { join, basename } from "path";
3
+ import { fileURLToPath } from "url";
4
+ import { readdirSync, writeFileSync } from "fs";
5
+ import net from "net";
6
+
7
+ import kl from "kleur";
8
+
9
+ const __dirname = fileURLToPath(new URL(".", import.meta.url));
10
+ const TEST_APP_PATH = join(__dirname, "./test.py");
11
+ const TEST_FILES_PATH = join(__dirname, "..", "js", "spa", "test");
12
+ const ROOT = join(__dirname, "..");
13
+
14
+ const test_files = readdirSync(TEST_FILES_PATH)
15
+ .filter(
16
+ (f) =>
17
+ f.endsWith("spec.ts") &&
18
+ !f.endsWith(".skip.spec.ts") &&
19
+ !f.endsWith(".component.spec.ts") &&
20
+ !f.endsWith(".reload.spec.ts")
21
+ )
22
+ .map((f) => ({
23
+ module_name: `${basename(f, ".spec.ts")}.run`,
24
+ dir_name: basename(f, ".spec.ts")
25
+ }));
26
+
27
+ export default async function global_setup() {
28
+ const verbose = process.env.GRADIO_TEST_VERBOSE;
29
+
30
+ const port = await find_free_port(7860, 8860);
31
+ process.env.GRADIO_E2E_TEST_PORT = port;
32
+
33
+ process.stdout.write(kl.yellow("\nCreating test gradio app.\n\n"));
34
+
35
+ const test_cases = [];
36
+ // check if there is a testcase file in the same directory as the test file
37
+ // if there is, append that to the file
38
+ test_files.forEach((value) => {
39
+ const test_case_dir = join(ROOT, "demo", value.dir_name);
40
+
41
+ readdirSync(test_case_dir)
42
+ .filter((f) => f.endsWith("_testcase.py"))
43
+ .forEach((f) => {
44
+ test_cases.push({
45
+ module_name: `${value.dir_name}.${basename(f, ".py")}`,
46
+ dir_name: `${value.dir_name}_${basename(f, ".py")}`
47
+ });
48
+ });
49
+ });
50
+
51
+ const all_test_files = test_files.concat(test_cases);
52
+ const test_app = make_app(all_test_files, port);
53
+ process.stdout.write(kl.yellow("App created. Starting test server.\n\n"));
54
+
55
+ process.stdout.write(kl.bgBlue(" =========================== \n"));
56
+ process.stdout.write(kl.bgBlue(" === PYTHON STARTUP LOGS === \n"));
57
+ process.stdout.write(kl.bgBlue(" =========================== \n\n"));
58
+
59
+ writeFileSync(TEST_APP_PATH, test_app);
60
+
61
+ const app = await spawn_gradio_app(TEST_APP_PATH, port, verbose);
62
+
63
+ process.stdout.write(
64
+ kl.green(`\n\nServer started. Running tests on port ${port}.\n`)
65
+ );
66
+
67
+ return () => {
68
+ process.stdout.write(kl.green(`\nTests complete, cleaning up!\n`));
69
+
70
+ kill_process(app);
71
+ };
72
+ }
73
+ const INFO_RE = /^INFO:/;
74
+
75
+ function spawn_gradio_app(app, port, verbose) {
76
+ const PORT_RE = new RegExp(`:${port}`);
77
+
78
+ return new Promise((res, rej) => {
79
+ const _process = spawn(`python`, [app], {
80
+ shell: true,
81
+ stdio: "pipe",
82
+ cwd: ROOT,
83
+ env: {
84
+ ...process.env,
85
+ PYTHONUNBUFFERED: "true",
86
+ GRADIO_ANALYTICS_ENABLED: "False",
87
+ GRADIO_IS_E2E_TEST: "1"
88
+ }
89
+ });
90
+ _process.stdout.setEncoding("utf8");
91
+
92
+ function std_out(data) {
93
+ const _data = data.toString();
94
+ const is_info = INFO_RE.test(_data);
95
+
96
+ if (is_info) {
97
+ process.stdout.write(kl.yellow(_data));
98
+ }
99
+
100
+ if (!is_info) {
101
+ process.stdout.write(`${_data}\n`);
102
+ }
103
+
104
+ if (PORT_RE.test(_data)) {
105
+ process.stdout.write(kl.bgBlue("\n =========== END =========== "));
106
+
107
+ res(_process);
108
+
109
+ if (!verbose) {
110
+ _process.stdout.off("data", std_out);
111
+ _process.stderr.off("data", std_out);
112
+ }
113
+ }
114
+ }
115
+
116
+ _process.stdout.on("data", std_out);
117
+ _process.stderr.on("data", std_out);
118
+ _process.on("exit", () => kill_process(_process));
119
+ _process.on("close", () => kill_process(_process));
120
+ _process.on("disconnect", () => kill_process(_process));
121
+ });
122
+ }
123
+
124
+ function kill_process(process) {
125
+ process.kill("SIGKILL");
126
+ }
127
+
128
+ function make_app(demos, port) {
129
+ return `
130
+ import uvicorn
131
+ from fastapi import FastAPI
132
+ import gradio as gr
133
+
134
+ ${demos.map((obj) => `from demo.${obj.module_name} import demo as ${obj.dir_name}`).join("\n")}
135
+
136
+ app = FastAPI()
137
+ ${demos
138
+ .map(
139
+ (obj) =>
140
+ `app = gr.mount_gradio_app(app, ${obj.dir_name}, path="/${obj.dir_name}", max_file_size=${
141
+ obj.dir_name == "upload_file_limit_test" ? "'15kb'" : "None"
142
+ })`
143
+ )
144
+ .join("\n")}
145
+
146
+ config = uvicorn.Config(app, port=${port}, log_level="info")
147
+ server = uvicorn.Server(config=config)
148
+ server.run()`;
149
+ }
150
+
151
+ export async function find_free_port(start_port, end_port) {
152
+ for (let port = start_port; port < end_port; port++) {
153
+ if (await is_free_port(port)) {
154
+ return port;
155
+ }
156
+ }
157
+
158
+ throw new Error(
159
+ `Could not find free ports: there were not enough ports available.`
160
+ );
161
+ }
162
+
163
+ export function is_free_port(port) {
164
+ return new Promise((accept, reject) => {
165
+ const sock = net.createConnection(port, "127.0.0.1");
166
+ sock.once("connect", () => {
167
+ sock.end();
168
+ accept(false);
169
+ });
170
+ sock.once("error", (e) => {
171
+ sock.destroy();
172
+ if (e.code === "ECONNREFUSED") {
173
+ accept(true);
174
+ } else {
175
+ reject(e);
176
+ }
177
+ });
178
+ });
179
+ }
.config/playwright.config.js ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { defineConfig, devices } from "@playwright/test";
2
+
3
+ const base = defineConfig({
4
+ use: {
5
+ screenshot: "only-on-failure",
6
+ trace: "retain-on-failure",
7
+ bypassCSP: true,
8
+ launchOptions: {
9
+ args: [
10
+ "--disable-web-security",
11
+ "--use-fake-device-for-media-stream",
12
+ "--use-fake-ui-for-media-stream",
13
+ "--use-file-for-fake-audio-capture=../gradio/test_data/test_audio.wav"
14
+ ]
15
+ }
16
+ },
17
+ expect: { timeout: 10000 },
18
+ timeout: 10000,
19
+ testMatch: /.*\.spec\.ts/,
20
+ testDir: "..",
21
+ workers: process.env.CI ? 1 : undefined,
22
+ retries: 3
23
+ });
24
+
25
+ const normal = defineConfig(base, {
26
+ globalSetup: process.env.CUSTOM_TEST ? undefined : "./playwright-setup.js",
27
+ projects: [
28
+ {
29
+ name: "firefox",
30
+ use: { ...devices["Desktop Firefox"] },
31
+ testMatch: /.stream_(audio|video)_out\.spec\.ts/
32
+ },
33
+ {
34
+ name: "chrome",
35
+ use: {
36
+ ...devices["Desktop Chrome"],
37
+ permissions: ["clipboard-read", "clipboard-write", "microphone"]
38
+ },
39
+ testIgnore: /.stream_(audio|video)_out\.spec\.ts/
40
+ }
41
+ ]
42
+ });
43
+
44
+ const lite = defineConfig(base, {
45
+ webServer: {
46
+ command: "python -m http.server 8000 --directory ../js/lite",
47
+ url: "http://localhost:8000/",
48
+ reuseExistingServer: !process.env.CI
49
+ },
50
+ testMatch: [
51
+ "**/file_component_events.spec.ts",
52
+ "**/kitchen_sink.spec.ts",
53
+ "**/gallery_component_events.spec.ts",
54
+ "**/image_remote_url.spec.ts", // To detect the bugs on Lite fixed in https://github.com/gradio-app/gradio/pull/8011 and https://github.com/gradio-app/gradio/pull/8026
55
+ "**/outbreak_forecast.spec.ts" // To test matplotlib on Lite
56
+ ],
57
+ workers: 1,
58
+ retries: 3,
59
+ timeout: 60000,
60
+ projects: [
61
+ {
62
+ name: "chromium",
63
+ use: { ...devices["Desktop Chrome"] }
64
+ },
65
+ {
66
+ name: "firefox",
67
+ use: { ...devices["Desktop Firefox"] },
68
+ testIgnore: "**/kitchen_sink.*" // This test requires the camera permission but it's not supported on FireFox: https://github.com/microsoft/playwright/issues/11714
69
+ }
70
+ ]
71
+ });
72
+
73
+ export default !!process.env.GRADIO_E2E_TEST_LITE ? lite : normal;
.config/playwright/index.html ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!doctype html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8" />
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6
+ <title>Testing Page</title>
7
+ </head>
8
+ <body>
9
+ <div id="root"></div>
10
+ <script type="module" src="./index.ts"></script>
11
+ </body>
12
+ </html>
.config/playwright/index.ts ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ // Import styles, initialize component theme here.
2
+ // import '../src/common.css';
.config/postcss.config.cjs ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ const tailwindcss = require("tailwindcss");
2
+ const autoprefixer = require("autoprefixer");
3
+ const nested = require("tailwindcss/nesting");
4
+ const tw_config = require("./tailwind.config.cjs");
5
+
6
+ module.exports = {
7
+ plugins: [nested, tailwindcss(tw_config), autoprefixer]
8
+ };
.config/setup_vite_tests.ts ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ import type { TestingLibraryMatchers } from "@testing-library/jest-dom/matchers";
2
+ import "@testing-library/jest-dom/vitest";
3
+
4
+ declare module "vitest" {
5
+ interface Assertion<T = any>
6
+ extends jest.Matchers<void, T>,
7
+ TestingLibraryMatchers<T, void> {}
8
+ }
.config/svelte.config.js ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ import { vitePreprocess } from "@sveltejs/vite-plugin-svelte";
2
+
3
+ export default {
4
+ preprocess: vitePreprocess()
5
+ };
.config/tailwind.config.cjs ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ module.exports = {
2
+ content: [
3
+ "./src/**/*.{html,js,svelte,ts}",
4
+ "**/@gradio/**/*.{html,js,svelte,ts}"
5
+ ],
6
+
7
+ theme: {
8
+ extend: {}
9
+ },
10
+
11
+ plugins: [require("@tailwindcss/forms")]
12
+ };
.config/vitest.config.ts ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ import config from "../js/spa/vite.config";
2
+
3
+ export default config;
.devcontainer/devcontainer.json ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // See https://containers.dev
2
+ {
3
+ "name": "Python 3",
4
+ "image": "mcr.microsoft.com/devcontainers/python:1-3.9",
5
+
6
+ // See https://containers.dev/features
7
+ "features": {
8
+ "ghcr.io/devcontainers/features/git:1": {},
9
+ "ghcr.io/devcontainers/features/node:1": {},
10
+ "ghcr.io/devcontainers-contrib/features/ffmpeg-apt-get:1": {}
11
+ },
12
+
13
+ "hostRequirements": {
14
+ "cpus": 4,
15
+ "memory": "8gb",
16
+ "storage": "32gb"
17
+ },
18
+
19
+ "customizations": {
20
+ "vscode": {
21
+ "extensions": [
22
+ "ms-python.python",
23
+ "ms-python.vscode-pylance",
24
+ "ms-python.black-formatter",
25
+ "ms-toolsai.jupyter",
26
+ "esbenp.prettier-vscode",
27
+ "svelte.svelte-vscode",
28
+ "phoenisx.cssvar"
29
+ ],
30
+ "remote.autoForwardPorts": false
31
+ }
32
+ },
33
+
34
+ "forwardPorts": [7860, 9876],
35
+ "portsAttributes": {
36
+ "7860": { "label": "gradio port" },
37
+ "9876": { "label": "gradio dev port" }
38
+ },
39
+
40
+ "postCreateCommand": "export NODE_OPTIONS=\"--max-old-space-size=8192\" && chmod +x scripts/install_gradio.sh scripts/install_test_requirements.sh scripts/build_frontend.sh && ./scripts/install_gradio.sh && ./scripts/install_test_requirements.sh && ./scripts/build_frontend.sh"
41
+ }
.dockerignore ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python build
2
+ .eggs/
3
+ gradio.egg-info/*
4
+ !gradio.egg-info/requires.txt
5
+ !gradio.egg-info/PKG-INFO
6
+ dist/
7
+ *.pyc
8
+ __pycache__/
9
+ *.py[cod]
10
+ *$py.class
11
+ build/
12
+
13
+ # JS build
14
+ gradio/templates/frontend/static
15
+ gradio/templates/frontend/cdn
16
+
17
+ # Secrets
18
+ .env
19
+
20
+ # Gradio run artifacts
21
+ *.db
22
+ *.sqlite3
23
+ gradio/launches.json
24
+ gradio/hash_seed.txt
25
+
26
+ # Tests
27
+ .coverage
28
+ coverage.xml
29
+ test.txt
30
+
31
+ # Demos
32
+ demo/tmp.zip
33
+ demo/flagged
34
+ demo/files/*.avi
35
+ demo/files/*.mp4
36
+
37
+ # Etc
38
+ .idea/*
39
+ .DS_Store
40
+ *.bak
41
+ workspace.code-workspace
.editorconfig ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+
2
+ root = true
3
+
4
+ [{js/**,client/js/**}]
5
+ end_of_line = lf
6
+ insert_final_newline = true
7
+ indent_style = tab
8
+ tab_width = 2
.git-blame-ignore-revs ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # https://github.com/gradio-app/gradio/pull/4487 - refactor components.py to separate files
2
+ 69f36f98535c904e7cac2b4942cecc747ed7443c
3
+ # Format the codebase
4
+ cc0cff893f9d7d472788adc2510c123967b384fe
5
+ # Switch from black to ruff
6
+ 8a70e83db9c7751b46058cdd2514e6bddeef6210
7
+ # format (#4810)
8
+ 7fa5e766ce0f89f1fb84c329e62c9df9c332120a
9
+ # lint website
10
+ 4bf301324b3b180fa32166ff1774312b01334c88
11
+ # format frontend with prettier
12
+ 980b9f60eb49ed81e4957debe7b23a559a4d4b51
13
+ # Refactor component directories (#5074)
14
+ 1419538ea795caa391e3de809379f10639e9e764
.gitattributes CHANGED
@@ -33,3 +33,38 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ demo/blocks_flipper/screenshot.gif filter=lfs diff=lfs merge=lfs -text
37
+ demo/blocks_neural_instrument_coding/sax.wav filter=lfs diff=lfs merge=lfs -text
38
+ demo/calculator/screenshot.gif filter=lfs diff=lfs merge=lfs -text
39
+ demo/chatbot_core_components/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
40
+ demo/chatbot_core_components_simple/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
41
+ demo/dataset/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
42
+ demo/gallery_component/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
43
+ demo/hello_world_2/screenshot.gif filter=lfs diff=lfs merge=lfs -text
44
+ demo/image_mod/screenshot.png filter=lfs diff=lfs merge=lfs -text
45
+ demo/kitchen_sink/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
46
+ demo/rt-detr-object-detection/3285790-hd_1920_1080_30fps.mp4 filter=lfs diff=lfs merge=lfs -text
47
+ demo/sales_projections/screenshot.gif filter=lfs diff=lfs merge=lfs -text
48
+ demo/sepia_filter/screenshot.gif filter=lfs diff=lfs merge=lfs -text
49
+ demo/unispeech-speaker-verification/samples/kirsten_dunst.wav filter=lfs diff=lfs merge=lfs -text
50
+ demo/video_component/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
51
+ guides/assets/hf_demo.mp4 filter=lfs diff=lfs merge=lfs -text
52
+ guides/cn/assets/hf_demo.mp4 filter=lfs diff=lfs merge=lfs -text
53
+ js/component-test/src/lib/images/world.mp4 filter=lfs diff=lfs merge=lfs -text
54
+ js/spa/test/files/file_test.ogg filter=lfs diff=lfs merge=lfs -text
55
+ js/spa/test/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
56
+ test/test_files/rotated_image.jpeg filter=lfs diff=lfs merge=lfs -text
57
+ venv/bin/ruff filter=lfs diff=lfs merge=lfs -text
58
+ venv/lib/python3.10/site-packages/PIL/.dylibs/libfreetype.6.dylib filter=lfs diff=lfs merge=lfs -text
59
+ venv/lib/python3.10/site-packages/PIL/.dylibs/libharfbuzz.0.dylib filter=lfs diff=lfs merge=lfs -text
60
+ venv/lib/python3.10/site-packages/numpy/.dylibs/libgfortran.5.dylib filter=lfs diff=lfs merge=lfs -text
61
+ venv/lib/python3.10/site-packages/numpy/.dylibs/libscipy_openblas64_.dylib filter=lfs diff=lfs merge=lfs -text
62
+ venv/lib/python3.10/site-packages/numpy/_core/_multiarray_umath.cpython-310-darwin.so filter=lfs diff=lfs merge=lfs -text
63
+ venv/lib/python3.10/site-packages/numpy/_core/_simd.cpython-310-darwin.so filter=lfs diff=lfs merge=lfs -text
64
+ venv/lib/python3.10/site-packages/pandas/_libs/algos.cpython-310-darwin.so filter=lfs diff=lfs merge=lfs -text
65
+ venv/lib/python3.10/site-packages/pandas/_libs/groupby.cpython-310-darwin.so filter=lfs diff=lfs merge=lfs -text
66
+ venv/lib/python3.10/site-packages/pandas/_libs/hashtable.cpython-310-darwin.so filter=lfs diff=lfs merge=lfs -text
67
+ venv/lib/python3.10/site-packages/pandas/_libs/interval.cpython-310-darwin.so filter=lfs diff=lfs merge=lfs -text
68
+ venv/lib/python3.10/site-packages/pandas/_libs/join.cpython-310-darwin.so filter=lfs diff=lfs merge=lfs -text
69
+ venv/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.cpython-310-darwin.so filter=lfs diff=lfs merge=lfs -text
70
+ venv/lib/python3.10/site-packages/pydantic_core/_pydantic_core.cpython-310-darwin.so filter=lfs diff=lfs merge=lfs -text
.github/ISSUE_TEMPLATE/bug_report_template.yml ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "\U0001F41E Bug report"
2
+ description: Report a bug on Gradio
3
+ labels: ["bug"]
4
+ body:
5
+ - type: markdown
6
+ attributes:
7
+ value: |
8
+ Thanks for taking the time to fill out this bug report! Before you get started, please [search to see](https://github.com/gradio-app/gradio/issues) if an issue already exists for the bug you encountered
9
+ - type: textarea
10
+ id: bug-description
11
+ attributes:
12
+ label: Describe the bug
13
+ description: Please provide a concise description of what the bug is, in clear English. If you intend to submit a PR for this issue, tell us in the description.
14
+ placeholder: Bug description
15
+ validations:
16
+ required: true
17
+ - type: checkboxes
18
+ attributes:
19
+ label: Have you searched existing issues? 🔎
20
+ description: Please search to see if an issue already exists for the issue you encountered.
21
+ options:
22
+ - label: I have searched and found no existing issues
23
+ required: true
24
+ - type: textarea
25
+ id: reproduction
26
+ attributes:
27
+ label: Reproduction
28
+ description: Please provide a minimal example, with code, that can be run to reproduce the issue. Do NOT provide screenshots of code, or link to external repos or applications. Use ``` to format code blocks.
29
+ placeholder: Reproduction
30
+ value: |
31
+ ```python
32
+ import gradio as gr
33
+
34
+ ```
35
+ validations:
36
+ required: true
37
+ - type: textarea
38
+ id: screenshot
39
+ attributes:
40
+ label: Screenshot
41
+ description: If relevant, please include screenshot(s) of your Gradio app so that we can understand what the issue is.
42
+ - type: textarea
43
+ id: logs
44
+ attributes:
45
+ label: Logs
46
+ description: "Please include the full stacktrace of the errors you get from Python or Javascript. If you are running in a colab notebooks, you can get the logs with by setting `debug=True`, i.e: `gradio.Interface.launch(debug=True)`"
47
+ render: shell
48
+ - type: textarea
49
+ id: system-info
50
+ attributes:
51
+ label: System Info
52
+ description: Please ensure you are running the latest version of Gradio. You can get the Gradio version and all its dependencies by running `gradio environment`
53
+ render: shell
54
+ validations:
55
+ required: true
56
+ - type: dropdown
57
+ id: severity
58
+ attributes:
59
+ label: Severity
60
+ description: Select the severity of this issue
61
+ options:
62
+ - I can work around it
63
+ - Blocking usage of gradio
64
+ validations:
65
+ required: true
66
+ - type: markdown
67
+ attributes:
68
+ value: |
69
+ 📌 Please ensure that you have filled all of the required sections above, and that the reproduction you have provided is [minimal, complete, and reproducible](https://stackoverflow.com/help/minimal-reproducible-example). Incomplete issues will be closed.
.github/ISSUE_TEMPLATE/config.yml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ blank_issues_enabled: false
2
+ contact_links:
3
+ - name: 💡 General questions
4
+ url: https://discord.com/invite/feTf9x3ZSB
5
+ about: Have general questions about how to use Gradio? Please ask in our community Discord for quicker responses
.github/ISSUE_TEMPLATE/feature_request.md ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ name: ⚡ Feature request
3
+ about: Suggest an improvement or new feature or a new Guide for Gradio
4
+ title: ''
5
+ labels: ''
6
+ assignees: ''
7
+
8
+ ---
9
+ - [ ] I have searched to see if a similar issue already exists.
10
+
11
+
12
+ **Is your feature request related to a problem? Please describe.**
13
+ A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
14
+
15
+ **Describe the solution you'd like**
16
+ A clear and concise description of what you want to happen.
17
+
18
+ **Additional context**
19
+ Add any other context or screenshots about the feature request here.
.github/PULL_REQUEST_TEMPLATE.md ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ## Description
2
+
3
+ Please include a concise summary, in clear English, of the changes in this pull request. If it closes an issue, please mention it here.
4
+
5
+ Closes: #(issue)
6
+
7
+ ## 🎯 PRs Should Target Issues
8
+
9
+ Before your create a PR, please check to see if there is [an existing issue](https://github.com/gradio-app/gradio/issues) for this change. If not, please create an issue before you create this PR, unless the fix is very small.
10
+
11
+ Not adhering to this guideline will result in the PR being closed.
12
+
13
+ ## Tests
14
+
15
+ 1. PRs will only be merged if tests pass on CI. To run the tests locally, please set up [your Gradio environment locally](https://github.com/gradio-app/gradio/blob/main/CONTRIBUTING.md) and run the tests: `bash scripts/run_all_tests.sh`
16
+
17
+ 2. You may need to run the linters: `bash scripts/format_backend.sh` and `bash scripts/format_frontend.sh`
18
+
.github/actions/changes/action.yml ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "prepare"
2
+ description: "Prepare workflow"
3
+
4
+ inputs:
5
+ token:
6
+ description: "GitHub token"
7
+ filter:
8
+ description: "Which filter to use"
9
+
10
+ outputs:
11
+ should_run:
12
+ description: "Whether to run the workflow"
13
+ value: ${{ steps.pr.outputs.should_run }}
14
+ pr_number:
15
+ description: "PR number"
16
+ value: ${{ steps.pr.outputs.pr_number }}
17
+ sha:
18
+ description: "SHA of the HEAD commit of the PR"
19
+ value: ${{ steps.pr.outputs.sha }}
20
+ source_repo:
21
+ description: "Source repo"
22
+ value: ${{ steps.pr.outputs.source_repo }}
23
+ source_branch:
24
+ description: "Source branch"
25
+ value: ${{ steps.pr.outputs.source_branch }}
26
+ labels:
27
+ description: "Labels on the PR"
28
+ value: ${{ steps.pr.outputs.labels }}
29
+ run_id:
30
+ description: "Run ID"
31
+ value: ${{ steps.pr.outputs.run_id }}
32
+ gradio_version:
33
+ description: "Gradio version"
34
+ value: ${{ steps.pr.outputs.gradio_version }}
35
+
36
+ runs:
37
+ using: "composite"
38
+ steps:
39
+ - uses: actions/checkout@v4
40
+ - uses: gradio-app/github/actions/filter-paths@main
41
+ id: changes
42
+ with:
43
+ token: ${{ inputs.token }}
44
+ filter: ${{ inputs.filter }}
45
+ - name: get gradio version
46
+ id: gradio_version
47
+ shell: bash
48
+ run: |
49
+ GRADIO_VERSION=$(curl -s https://pypi.org/pypi/gradio/json | grep -o '"version":"[^"]*"' | cut -d'"' -f4 | head -n 1)
50
+ echo "gradio_version=$GRADIO_VERSION" >> $GITHUB_OUTPUT
51
+ - name: convert to JSON
52
+ uses: gradio-app/github/actions/input-to-json@main
53
+ with:
54
+ path: output.json
55
+ sha: ${{ github.event.pull_request.head.sha || github.sha }}
56
+ source_repo: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
57
+ source_branch: ${{ github.event.pull_request.head.ref || github.ref }}
58
+ pr_number: ${{ github.event.pull_request.number || 'false'}}
59
+ should_run: ${{ steps.changes.outputs.match }}
60
+ labels: "[${{ join(github.event.pull_request.labels.*.name, ', ') }}]"
61
+ run_id: ${{ github.run_id }}
62
+ gradio_version: ${{ steps.gradio_version.outputs.gradio_version }}
63
+ - name: cat json
64
+ run: cat output.json
65
+ shell: bash
66
+ - name: upload JSON
67
+ uses: actions/upload-artifact@v4
68
+ with:
69
+ name: changes
70
+ path: output.json
71
+ - name: set outputs
72
+ id: pr
73
+ uses: gradio-app/github/actions/json-to-output@main
74
+ with:
75
+ path: output.json
76
+ - name: echo outputs
77
+ run: echo "${{ toJson(steps.pr.outputs) }}"
78
+ shell: bash
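
The `get gradio version` step above reads the latest released version from PyPI's JSON API with a `curl | grep | cut` pipeline. For reference only, a minimal Python sketch of the same lookup using just the standard library (the helper name `latest_gradio_version` is illustrative and not part of the repository):

```python
import json
from urllib.request import urlopen


def latest_gradio_version() -> str:
    """Return the latest released gradio version reported by PyPI's JSON API."""
    # Same endpoint the workflow queries with curl; the version lives under info.version.
    with urlopen("https://pypi.org/pypi/gradio/json") as resp:
        data = json.load(resp)
    return data["info"]["version"]


if __name__ == "__main__":
    # The workflow writes the equivalent value to $GITHUB_OUTPUT as `gradio_version`.
    print(latest_gradio_version())
```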
.github/actions/install-all-deps/action.yml ADDED
@@ -0,0 +1,92 @@
1
+ name: "install all deps"
2
+ description: "Install all deps"
3
+
4
+ inputs:
5
+ skip_build:
6
+ description: "Skip build"
7
+ default: "false"
8
+ build_lite:
9
+ description: "Build lite"
10
+ default: "false"
11
+ test:
12
+ description: "Test"
13
+ default: "false"
14
+ python_version:
15
+ description: "Python version"
16
+ default: "3.10"
17
+ os:
18
+ description: "OS"
19
+ default: "ubuntu-latest"
20
+ outputs:
21
+ venv_activate:
22
+ description: "Venv activate"
23
+ value: ${{ steps.venv.outputs.venv_activate }}
24
+
25
+ runs:
26
+ using: "composite"
27
+ steps:
28
+ - name: Set venv binary path (linux)
29
+ if: ${{ inputs.os == 'ubuntu-latest' }}
30
+ shell: bash
31
+ run: |
32
+ echo "VENV_ACTIVATE=venv/bin/activate" >> $GITHUB_ENV
33
+ - name: Set venv binary path (windows)
34
+ if: ${{ inputs.os == 'windows-latest' }}
35
+ shell: bash
36
+ run: |
37
+ echo "VENV_ACTIVATE=venv\\\Scripts\\\activate" >> $GITHUB_ENV
38
+ - name: log venv binary path
39
+ id: venv
40
+ shell: bash
41
+ run: |
42
+ echo "venv_activate=$VENV_ACTIVATE" >> $GITHUB_OUTPUT
43
+ - name: Install Python
44
+ uses: actions/setup-python@v5
45
+ with:
46
+ python-version: ${{ inputs.python_version }}
47
+ cache: pip
48
+ cache-dependency-path: |
49
+ client/python/requirements.txt
50
+ requirements.txt
51
+ test/requirements.txt
52
+ - name: Create env
53
+ shell: bash
54
+ run: |
55
+ python -m pip install --upgrade virtualenv
56
+ python -m virtualenv venv
57
+ # - uses: actions/cache@v4
58
+ # id: cache
59
+ # with:
60
+ # path: |
61
+ # venv/*
62
+ # client/python/venv
63
+ # restore-keys: |
64
+ # gradio-lib-${{inputs.python_version}}-${{inputs.os}}-latest-pip-
65
+ # key: "gradio-lib-${{inputs.python_version}}-${{inputs.os}}-latest-pip-${{ hashFiles('client/python/requirements.txt') }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('test/requirements.txt') }}-${{ hashFiles('client/python/test/requirements.txt') }}${{ inputs.test == 'true' && '-test' || ''}}"
66
+ - name: Install ffmpeg
67
+ uses: FedericoCarboni/setup-ffmpeg@583042d32dd1cabb8bd09df03bde06080da5c87c # @v2
68
+ - name: Install test dependencies
69
+ if: inputs.test == 'true'
70
+ # && steps.cache.outputs.cache-hit != 'true'
71
+ shell: bash
72
+ run: |
73
+ . ${{ env.VENV_ACTIVATE }}
74
+ python -m pip install -r test/requirements.txt
75
+ python -m pip install -r client/python/test/requirements.txt
76
+ - name: Install Gradio and Client Libraries Locally (Linux)
77
+ shell: bash
78
+ run: |
79
+ . ${{ env.VENV_ACTIVATE }}
80
+ python -m pip install -e client/python
81
+ python -m pip install -e ".[oauth]"
82
+ - name: install-frontend
83
+ uses: "gradio-app/gradio/.github/actions/install-frontend-deps@main"
84
+ with:
85
+ skip_build: ${{ inputs.skip_build }}
86
+ build_lite: ${{ inputs.build_lite }}
87
+ - name: generate json
88
+ shell: bash
89
+ if: inputs.os == 'ubuntu-latest'
90
+ run: |
91
+ . ${{ env.VENV_ACTIVATE }}
92
+ pip install boto3 && python js/_website/generate_jsons/generate.py
.github/actions/install-frontend-deps/action.yml ADDED
@@ -0,0 +1,51 @@
1
+ name: "install frontend"
2
+ description: "Install frontend deps"
3
+
4
+ inputs:
5
+ skip_build:
6
+ description: "Skip build"
7
+ default: "false"
8
+ build_lite:
9
+ description: "Build lite"
10
+ default: "false"
11
+
12
+ runs:
13
+ using: "composite"
14
+ steps:
15
+ # - uses: actions/cache@v4
16
+ # id: frontend-cache
17
+ # with:
18
+ # path: |
19
+ # gradio/templates/*
20
+ # key: gradio-lib-front-end-${{ hashFiles('js/**', 'client/js/**')}}
21
+ - name: Install pnpm
22
+ uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # @v4
23
+ with:
24
+ version: 9.1.x
25
+ - uses: actions/setup-node@v4
26
+ with:
27
+ node-version: 18
28
+ cache: pnpm
29
+ cache-dependency-path: pnpm-lock.yaml
30
+ - name: Install deps
31
+ shell: bash
32
+ run: pnpm i --frozen-lockfile --ignore-scripts
33
+ - name: Build Css
34
+ shell: bash
35
+ run: pnpm css
36
+ - name: Build frontend
37
+ if: inputs.skip_build == 'false'
38
+ # && steps.frontend-cache.outputs.cache-hit != 'true'
39
+ shell: bash
40
+ run: pnpm build
41
+ - name: generate types
42
+ if: inputs.skip_build == 'false' || inputs.build_lite == 'true'
43
+ shell: bash
44
+ run: pnpm package
45
+ - name: Build frontend lite
46
+ if: inputs.build_lite == 'true'
47
+ shell: bash
48
+ run: |
49
+ . venv/bin/activate
50
+ python -m pip install -U build hatch "packaging>=23.2" # packaging>=23.2 is needed to build Lite due to https://github.com/pypa/hatch/issues/1381
51
+ pnpm build:lite
.github/configs/semgrep_rules.yaml ADDED
@@ -0,0 +1,110 @@
1
+ rules:
2
+ - id: third-party-action-not-pinned-to-commit-sha
3
+ patterns:
4
+ - pattern-inside: "{steps: ...}"
5
+ - pattern: |
6
+ uses: "$USES"
7
+ - metavariable-pattern:
8
+ metavariable: $USES
9
+ language: generic
10
+ patterns:
11
+ - pattern-not-regex: ^[.]/
12
+ - pattern-not-regex: ^actions/
13
+ - pattern-not-regex: ^github/
14
+ - pattern-not-regex: ^gradio-app/gradio
15
+ - pattern-not-regex: ^gradio-app/github
16
+ - pattern-not-regex: "@[0-9a-f]{40}$"
17
+ - pattern-not-regex: ^docker://.*@sha256:[0-9a-f]{64}$
18
+ - pattern-not-regex: ^docker://docker$
19
+ message:
20
+ An action sourced from a third-party repository on GitHub is not pinned
21
+ to a full length commit SHA. Pinning an action to a full length commit SHA
22
+ is currently the only way to use an action as an immutable release.
23
+ Pinning to a particular SHA helps mitigate the risk of a bad actor adding
24
+ a backdoor to the action's repository, as they would need to generate a
25
+ SHA-1 collision for a valid Git object payload.
26
+ languages:
27
+ - yaml
28
+ severity: WARNING
29
+ metadata:
30
+ cwe:
31
+ - "CWE-1357: Reliance on Insufficiently Trustworthy Component"
32
+ - "CWE-353: Missing Support for Integrity Check"
33
+ owasp: A06:2021 - Vulnerable and Outdated Components
34
+ references:
35
+ - https://owasp.org/Top10/A06_2021-Vulnerable_and_Outdated_Components
36
+ - https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-third-party-actions
37
+ category: security
38
+ technology:
39
+ - github-actions
40
+ subcategory:
41
+ - vuln
42
+ likelihood: LOW
43
+ impact: LOW
44
+ confidence: HIGH
45
+ license: Commons Clause License Condition v1.0[LGPL-2.1-only]
46
+ vulnerability_class:
47
+ - Cryptographic Issues
48
+ - Other
49
+
50
+ - id: insecure-file-permissions
51
+ languages:
52
+ - python
53
+ severity: ERROR
54
+ message: These permissions `$BITS` are widely permissive and grant access to
55
+ more people than may be necessary. A good default is `0o644` which gives
56
+ read and write access to yourself and read access to everyone else.
57
+ patterns:
58
+ - pattern-inside: os.$METHOD(...)
59
+ - pattern-either:
60
+ - patterns:
61
+ - pattern: os.$METHOD($FILE, $BITS, ...)
62
+ - metavariable-comparison:
63
+ comparison: $BITS >= 0o650 and $BITS < 0o100000
64
+ - patterns:
65
+ - pattern: os.$METHOD($FILE, $BITS)
66
+ - metavariable-comparison:
67
+ comparison: $BITS >= 0o100650
68
+ - patterns:
69
+ - pattern: os.$METHOD($FILE, $BITS, ...)
70
+ - metavariable-pattern:
71
+ metavariable: $BITS
72
+ patterns:
73
+ - pattern-either:
74
+ - pattern: <... stat.S_IWGRP ...>
75
+ - pattern: <... stat.S_IXGRP ...>
76
+ - pattern: <... stat.S_IWOTH ...>
77
+ - pattern: <... stat.S_IXOTH ...>
78
+ - pattern: <... stat.S_IRWXO ...>
79
+ - pattern: <... stat.S_IRWXG ...>
80
+ - patterns:
81
+ - pattern: os.$METHOD($FILE, $EXPR | $MOD, ...)
82
+ - metavariable-comparison:
83
+ comparison: $MOD == 0o111
84
+ - metavariable-pattern:
85
+ metavariable: $METHOD
86
+ patterns:
87
+ - pattern-either:
88
+ - pattern: chmod
89
+ - pattern: lchmod
90
+ - pattern: fchmod
91
+ metadata:
92
+ category: security
93
+ owasp:
94
+ - A01:2021 - Broken Access Control
95
+ cwe:
96
+ - "CWE-276: Incorrect Default Permissions"
97
+ technology:
98
+ - python
99
+ references:
100
+ - https://owasp.org/Top10/A01_2021-Broken_Access_Control
101
+ cwe2022-top25: true
102
+ cwe2021-top25: true
103
+ subcategory:
104
+ - vuln
105
+ likelihood: LOW
106
+ impact: MEDIUM
107
+ confidence: MEDIUM
108
+ license: Commons Clause License Condition v1.0[LGPL-2.1-only]
109
+ vulnerability_class:
110
+ - Improper Authorization
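
The `insecure-file-permissions` rule above flags calls like `os.chmod(path, mode)` whose mode grants write or execute access to group or other, and its message recommends `0o644` as a safer default. A small, hypothetical illustration of modes the rule would and would not flag (the file path is a placeholder created only for this example):

```python
import os
import stat

path = "scratch.txt"  # placeholder file, created just for this illustration
open(path, "w").close()

# Flagged: 0o777 grants write and execute to group and other ($BITS >= 0o650).
os.chmod(path, 0o777)

# Flagged: the mode explicitly includes group/other write bits via stat constants.
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)

# Not flagged: 0o644 limits write access to the owner, the default the rule's message recommends.
os.chmod(path, 0o644)

os.remove(path)
```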
.github/filters.json ADDED
@@ -0,0 +1,51 @@
1
+ {
2
+ "gradio": [
3
+ "client/python/**",
4
+ "gradio/**",
5
+ "requirements.txt",
6
+ ".github/**",
7
+ "scripts/**",
8
+ "test/**"
9
+ ],
10
+ "js": [
11
+ "js/**",
12
+ "client/js/**",
13
+ ".github/**",
14
+ "package.json",
15
+ "pnpm-lock.yaml",
16
+ "tsconfig.json",
17
+ ".config/**"
18
+ ],
19
+ "functional": [
20
+ ".github/**",
21
+ "client/**",
22
+ "demo/**",
23
+ "gradio/**",
24
+ "js/**",
25
+ "scripts/**",
26
+
27
+ "globals.d.ts",
28
+ "package.json",
29
+ "pnpm-lock.yaml",
30
+ "pyproject.toml",
31
+ "requirements.txt",
32
+ ".config/**"
33
+ ],
34
+ "visual": [
35
+ ".github/workflows/deploy-chromatic.yml",
36
+ "js!(/_website)/**",
37
+ "package.json"
38
+ ],
39
+ "website": [
40
+ "js/_website/**",
41
+ "package.json",
42
+ "pnpm-lock.yaml",
43
+ "guides/**",
44
+ "README.md",
45
+ "CHANGELOG.md",
46
+ "gradio/**",
47
+ "client/**",
48
+ "demo/**",
49
+ ".github/deploy-website.yml"
50
+ ]
51
+ }
.github/stale ADDED
@@ -0,0 +1,17 @@
1
+ # Number of days of inactivity before an issue becomes stale
2
+ daysUntilStale: 30
3
+ # Number of days of inactivity before a stale issue is closed
4
+ daysUntilClose: 7
5
+ # Issues with these labels will never be considered stale
6
+ exemptLabels:
7
+ - pinned
8
+ - security
9
+ # Label to use when marking an issue as stale
10
+ staleLabel: wontfix
11
+ # Comment to post when marking an issue as stale. Set to `false` to disable
12
+ markComment: >
13
+ This issue has been automatically marked as stale because it has not had
14
+ recent activity. It will be closed if no further activity occurs. Thank you
15
+ for your contributions.
16
+ # Comment to post when closing a stale issue. Set to `false` to disable
17
+ closeComment: false
.github/workflows/comment-queue.yml ADDED
@@ -0,0 +1,37 @@
1
+ name: Comment on pull request without race conditions
2
+
3
+ on:
4
+ workflow_call:
5
+ inputs:
6
+ pr_number:
7
+ type: string
8
+ message:
9
+ required: true
10
+ type: string
11
+ tag:
12
+ required: false
13
+ type: string
14
+ default: "previews"
15
+ additional_text:
16
+ required: false
17
+ type: string
18
+ default: ""
19
+ secrets:
20
+ gh_token:
21
+ required: true
22
+
23
+ jobs:
24
+ comment:
25
+ environment: comment_pr
26
+ concurrency:
27
+ group: ${{inputs.pr_number || inputs.tag}}
28
+ runs-on: ubuntu-latest
29
+ steps:
30
+ - name: comment on pr
31
+ uses: "gradio-app/github/actions/comment-pr@main"
32
+ with:
33
+ gh_token: ${{ secrets.gh_token }}
34
+ tag: ${{ inputs.tag }}
35
+ pr_number: ${{ inputs.pr_number}}
36
+ message: ${{ inputs.message }}
37
+ additional_text: ${{ inputs.additional_text }}
.github/workflows/delete-stale-spaces.yml ADDED
@@ -0,0 +1,42 @@
1
+ # safe runs from main
2
+
3
+ name: Delete Stale Spaces
4
+
5
+ on:
6
+ schedule:
7
+ - cron: "0 0 * * *"
8
+ workflow_dispatch:
9
+ inputs:
10
+ daysStale:
11
+ description: "How stale a space needs to be to be deleted (days)"
12
+ required: true
13
+ default: "7"
14
+
15
+ permissions: {}
16
+
17
+ jobs:
18
+ delete-old-spaces:
19
+ permissions:
20
+ contents: read
21
+ environment: deploy_spaces
22
+ runs-on: ubuntu-latest
23
+ steps:
24
+ - uses: actions/checkout@v4
25
+ - name: Install Python
26
+ uses: actions/setup-python@v5
27
+ with:
28
+ python-version: "3.10"
29
+ - name: Install pip
30
+ run: python -m pip install pip wheel requests
31
+ - name: Install Hub Client Library
32
+ run: pip install huggingface-hub==0.9.1
33
+ - name: Set daysStale
34
+ env:
35
+ DEFAULT_DAYS_STALE: "7"
36
+ DAYS_STALE: ${{ github.event.inputs.daysStale || env.DEFAULT_DAYS_STALE}}
37
+ run: echo "DAYS_STALE=$DAYS_STALE" >> $GITHUB_ENV
38
+ - name: Find and delete stale spaces
39
+ run: |
40
+ python scripts/delete_old_spaces.py $DAYS_STALE \
41
+ gradio-pr-deploys \
42
+ ${{ secrets.SPACES_DEPLOY_TOKEN }}
.github/workflows/generate-changeset.yml ADDED
@@ -0,0 +1,96 @@
1
+ name: Generate changeset
2
+ on:
3
+ workflow_run:
4
+ workflows: ["trigger-changeset"]
5
+ types:
6
+ - completed
7
+
8
+ env:
9
+ CI: true
10
+ NODE_OPTIONS: "--max-old-space-size=4096"
11
+
12
+ concurrency:
13
+ group: "${{ github.event.workflow_run.head_repository.full_name }}-${{ github.event.workflow_run.head_branch }}-${{ github.workflow_ref }}"
14
+ cancel-in-progress: true
15
+
16
+ permissions: {}
17
+
18
+ jobs:
19
+ get-pr:
20
+ runs-on: ubuntu-latest
21
+ permissions:
22
+ contents: read
23
+ pull-requests: read
24
+ if: github.event.workflow_run.conclusion == 'success'
25
+ outputs:
26
+ found_pr: ${{ steps.pr_details.outputs.found_pr }}
27
+ pr_number: ${{ steps.pr_details.outputs.pr_number }}
28
+ source_repo: ${{ steps.pr_details.outputs.source_repo }}
29
+ source_branch: ${{ steps.pr_details.outputs.source_branch }}
30
+ steps:
31
+ - name: get pr details
32
+ id: pr_details
33
+ uses: gradio-app/github/actions/find-pr@main
34
+ with:
35
+ github_token: ${{ secrets.GITHUB_TOKEN }}
36
+ comment-changes-start:
37
+ uses: "./.github/workflows/comment-queue.yml"
38
+ needs: get-pr
39
+ secrets:
40
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
41
+ with:
42
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
43
+ message: changes~pending~null
44
+ version:
45
+ permissions:
46
+ contents: read
47
+ environment: changeset
48
+ name: version
49
+ needs: get-pr
50
+ runs-on: ubuntu-latest
51
+ if: needs.get-pr.outputs.found_pr == 'true'
52
+ outputs:
53
+ skipped: ${{ steps.version.outputs.skipped }}
54
+ comment_url: ${{ steps.version.outputs.comment_url }}
55
+ steps:
56
+ - uses: actions/checkout@v4
57
+ with:
58
+ repository: ${{ needs.get-pr.outputs.source_repo }}
59
+ ref: ${{ needs.get-pr.outputs.source_branch }}
60
+ fetch-depth: 0
61
+ token: ${{ secrets.COMMENT_TOKEN }}
62
+ - name: generate changeset
63
+ id: version
64
+ uses: "gradio-app/github/actions/generate-changeset@main"
65
+ with:
66
+ github_token: ${{ secrets.CHANGESET_GITHUB_TOKEN }}
67
+ main_pkg: gradio
68
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
69
+ branch_name: ${{ needs.get-pr.outputs.source_branch }}
70
+ comment-changes-skipped:
71
+ uses: "./.github/workflows/comment-queue.yml"
72
+ needs: [get-pr, version]
73
+ if: needs.version.result == 'success' && needs.version.outputs.skipped == 'true'
74
+ secrets:
75
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
76
+ with:
77
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
78
+ message: changes~warning~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
79
+ comment-changes-success:
80
+ uses: "./.github/workflows/comment-queue.yml"
81
+ needs: [get-pr, version]
82
+ if: needs.version.result == 'success' && needs.version.outputs.skipped == 'false'
83
+ secrets:
84
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
85
+ with:
86
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
87
+ message: changes~success~${{ needs.version.outputs.comment_url }}
88
+ comment-changes-failure:
89
+ uses: "./.github/workflows/comment-queue.yml"
90
+ needs: [get-pr, version]
91
+ if: always() && needs.version.result == 'failure'
92
+ secrets:
93
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
94
+ with:
95
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
96
+ message: changes~failure~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
.github/workflows/npm-previews.yml ADDED
@@ -0,0 +1,53 @@
1
+ name: "npm"
2
+
3
+ on:
4
+ pull_request:
5
+
6
+ env:
7
+ CI: true
8
+ PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "1"
9
+ NODE_OPTIONS: "--max-old-space-size=4096"
10
+
11
+ concurrency:
12
+ group: "${{ github.event.pull_request.number }}-${{ github.ref_name }}-${{ github.workflow }}"
13
+ cancel-in-progress: true
14
+
15
+ permissions: {}
16
+
17
+ jobs:
18
+ changes:
19
+ permissions:
20
+ contents: read
21
+ pull-requests: read
22
+ name: "changes"
23
+ runs-on: ubuntu-latest
24
+ outputs:
25
+ should_run: ${{ steps.changes.outputs.should_run }}
26
+ sha: ${{ steps.changes.outputs.sha }}
27
+ pr_number: ${{ steps.changes.outputs.pr_number }}
28
+ source_branch: ${{ steps.changes.outputs.source_branch }}
29
+ source_repo: ${{ steps.changes.outputs.source_repo }}
30
+ steps:
31
+ - uses: actions/checkout@v4
32
+ - uses: "gradio-app/gradio/.github/actions/changes@main"
33
+ id: changes
34
+ with:
35
+ filter: "js"
36
+ token: ${{ secrets.GITHUB_TOKEN }}
37
+ preview:
38
+ permissions:
39
+ contents: read
40
+ name: npm-previews
41
+ runs-on: ubuntu-22.04
42
+ needs: changes
43
+ if: needs.changes.outputs.should_run == 'true'
44
+ steps:
45
+ - uses: actions/checkout@v4
46
+ - name: install dependencies
47
+ uses: "gradio-app/gradio/.github/actions/install-frontend-deps@main"
48
+ with:
49
+ skip_build: true
50
+ - name: build client
51
+ run: pnpm --filter @gradio/client --filter @gradio/wasm --filter @gradio/preview build
52
+ - name: publish npm previews
53
+ run: pnpx pkg-pr-new publish './js/*' './client/js' --comment=off
.github/workflows/previews-build.yml ADDED
@@ -0,0 +1,90 @@
1
+ name: "previews-build"
2
+
3
+ on:
4
+ workflow_dispatch:
5
+ pull_request:
6
+
7
+ env:
8
+ CI: true
9
+ PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "1"
10
+ NODE_OPTIONS: "--max-old-space-size=4096"
11
+
12
+ concurrency:
13
+ group: "${{ github.event.pull_request.number }}-${{ github.ref_name }}-${{ github.workflow }}"
14
+ cancel-in-progress: true
15
+
16
+ permissions: {}
17
+
18
+ jobs:
19
+ changes:
20
+ name: "changes"
21
+ runs-on: ubuntu-latest
22
+ permissions:
23
+ contents: read
24
+ pull-requests: read
25
+ outputs:
26
+ should_run: ${{ steps.changes.outputs.should_run }}
27
+ sha: ${{ steps.changes.outputs.sha }}
28
+ gradio_version: ${{ steps.changes.outputs.gradio_version }}
29
+ steps:
30
+ - uses: actions/checkout@v4
31
+ - uses: "gradio-app/gradio/.github/actions/changes@main"
32
+ id: changes
33
+ with:
34
+ filter: "functional"
35
+ token: ${{ secrets.GITHUB_TOKEN }}
36
+ build:
37
+ permissions:
38
+ contents: read
39
+ name: "previews-build"
40
+ runs-on: ubuntu-22.04
41
+ needs: changes
42
+ if: needs.changes.outputs.should_run == 'true' || github.event_name == 'workflow_dispatch'
43
+ steps:
44
+ - uses: actions/checkout@v4
45
+ - name: install dependencies
46
+ uses: "gradio-app/gradio/.github/actions/install-all-deps@main"
47
+ with:
48
+ python_version: "3.10"
49
+ build_lite: "true"
50
+ - name: Package Lite NPM package
51
+ working-directory: js/lite
52
+ run: pnpm pack --pack-destination .
53
+ - name: Upload Lite NPM package
54
+ uses: actions/upload-artifact@v4
55
+ with:
56
+ name: gradio-lite-tar
57
+ path: js/lite/gradio-lite-*.tgz
58
+ - name: install deps
59
+ run: python -m pip install build
60
+ - name: Build pr package
61
+ run: |
62
+ python -c 'import json; j = json.load(open("gradio/package.json")); j["version"] = "${{ needs.changes.outputs.gradio_version }}"; json.dump(j, open("gradio/package.json", "w"))'
63
+ python3 -m build -w
64
+ - name: Upload Python package
65
+ uses: actions/upload-artifact@v4
66
+ with:
67
+ name: gradio-build
68
+ path: dist/gradio-${{ needs.changes.outputs.gradio_version }}-py3-none-any.whl
69
+ - name: copy demos
70
+ uses: "gradio-app/github/actions/copy-demos@main"
71
+ with:
72
+ gradio_version: "https://gradio-pypi-previews.s3.amazonaws.com/${{ needs.changes.outputs.sha }}/gradio-${{ needs.changes.outputs.gradio_version }}-py3-none-any.whl"
73
+ gradio_client_version: "gradio-client @ git+https://github.com/gradio-app/gradio@${{ needs.changes.outputs.sha }}#subdirectory=client/python"
74
+ - name: upload demos
75
+ uses: actions/upload-artifact@v4
76
+ with:
77
+ name: all_demos
78
+ path: demo
79
+ - name: Create JS client tarball
80
+ id: create_js_tarball
81
+ continue-on-error: true
82
+ run: |
83
+ cd client/js
84
+ tarball_name=$(npm pack)
85
+ echo "tarball_name=client/js/$tarball_name" >> $GITHUB_OUTPUT
86
+ - name: Upload JS client tarball artifact
87
+ uses: actions/upload-artifact@v4
88
+ with:
89
+ name: js-client-tarball
90
+ path: ${{ steps.create_js_tarball.outputs.tarball_name }}
.github/workflows/previews-deploy.yml ADDED
@@ -0,0 +1,176 @@
1
+ name: "previews-deploy"
2
+
3
+ on:
4
+ workflow_run:
5
+ workflows: ["previews-build"]
6
+ types:
7
+ - completed
8
+
9
+ concurrency:
10
+ group: "${{ github.event.workflow_run.head_repository.full_name }}-${{ github.event.workflow_run.head_branch }}-${{ github.workflow_ref }}"
11
+ cancel-in-progress: true
12
+
13
+ jobs:
14
+ changes:
15
+ name: "changes"
16
+ runs-on: ubuntu-latest
17
+ if: github.event.workflow_run.conclusion == 'success'
18
+ permissions:
19
+ actions: read
20
+ outputs:
21
+ should_run: ${{ steps.json.outputs.should_run }}
22
+ sha: ${{ steps.json.outputs.sha }}
23
+ pr_number: ${{ steps.json.outputs.pr_number }}
24
+ source_branch: ${{ steps.json.outputs.source_branch }}
25
+ source_repo: ${{ steps.json.outputs.source_repo }}
26
+ labels: ${{ steps.json.outputs.labels }}
27
+ run_id: ${{ steps.json.outputs.run_id }}
28
+ gradio_version: ${{ steps.json.outputs.gradio_version }}
29
+ steps:
30
+ - name: Download artifact
31
+ uses: actions/download-artifact@v4
32
+ with:
33
+ name: changes
34
+ github-token: ${{ secrets.GITHUB_TOKEN }}
35
+ run-id: ${{ github.event.workflow_run.id }}
36
+ - uses: gradio-app/github/actions/json-to-output@main
37
+ id: json
38
+ with:
39
+ path: output.json
40
+
41
+ comment-spaces-start:
42
+ needs: changes
43
+ uses: "./.github/workflows/comment-queue.yml"
44
+ if: ${{ needs.changes.outputs.should_run == 'true' }}
45
+ secrets:
46
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
47
+ with:
48
+ pr_number: ${{ needs.changes.outputs.pr_number }}
49
+ message: spaces~pending~null
50
+ deploy:
51
+ name: "previews-deploy"
52
+ environment: deploy_spaces
53
+ outputs:
54
+ space_url: ${{ steps.upload-demo.outputs.SPACE_URL }}
55
+ js_tarball_url: ${{ steps.upload_js_tarball.outputs.js_tarball_url }}
56
+ needs: changes
57
+ if: ${{ (github.event.workflow_run.conclusion == 'success' && needs.changes.outputs.should_run == 'true') || github.event.workflow_run.event == 'workflow_dispatch' }}
58
+ runs-on: ubuntu-latest
59
+ permissions:
60
+ actions: read
61
+ steps:
62
+ - name: Download all artifacts
63
+ uses: actions/download-artifact@v4
64
+ with:
65
+ run-id: ${{ github.event.workflow_run.id }}
66
+ github-token: ${{ secrets.GITHUB_TOKEN }}
67
+ - name: list artifacts
68
+ run: ls -R .
69
+
70
+ - name: Set wheel name
71
+ id: set_wheel_name
72
+ run: |
73
+ wheel_file=$(find ./gradio-build -maxdepth 1 -type f -name "*.whl" -print -quit)
74
+ echo "wheel_name=$wheel_file" >> $GITHUB_OUTPUT
75
+ - name: Upload wheel
76
+ run: |
77
+ export AWS_ACCESS_KEY_ID=${{ secrets.PYPI_PREVIEWS_AWS_S3_ACCESS_KEY }}
78
+ export AWS_SECRET_ACCESS_KEY=${{ secrets.PYPI_PREVIEWS_AWS_S3_SECRET_ACCESS_KEY }}
79
+ export AWS_DEFAULT_REGION=us-east-1
80
+ aws s3 cp ${{ steps.set_wheel_name.outputs.wheel_name}} s3://gradio-pypi-previews/${{ needs.changes.outputs.sha }}/
81
+
82
+ - name: Set tarball name
83
+ id: set_tarball_name
84
+ run: |
85
+ tar_gz_file=$(find ./js-client-tarball -maxdepth 1 -type f -name "*.tgz" -print -quit)
86
+ echo "tarball_path=$tar_gz_file" >> $GITHUB_OUTPUT
87
+ echo "tarball_name=$(basename $tar_gz_file)" >> $GITHUB_OUTPUT
88
+ - name: Upload JS Client Tarball to S3
89
+ id: upload_js_tarball
90
+ continue-on-error: true
91
+ run: |
92
+ export AWS_ACCESS_KEY_ID=${{ secrets.NPM_PREVIEWS_AWS_S3_ACCESS_KEY }}
93
+ export AWS_SECRET_ACCESS_KEY=${{ secrets.NPM_PREVIEWS_AWS_S3_SECRET_ACCESS_KEY }}
94
+ export AWS_DEFAULT_REGION=us-east-1
95
+ aws s3 cp ${{ steps.set_tarball_name.outputs.tarball_path }} s3://gradio-npm-previews/${{ needs.changes.outputs.sha }}/
96
+ echo "js_tarball_url=https://gradio-npm-previews.s3.amazonaws.com/${{ needs.changes.outputs.sha }}/${{ steps.set_tarball_name.outputs.tarball_name }}" >> $GITHUB_OUTPUT
97
+ - name: Upload Lite to S3
98
+ id: upload_lite
99
+ run: |
100
+ export AWS_ACCESS_KEY_ID=${{ secrets.LITE_PREVIEWS_AWS_S3_ACCESS_KEY }}
101
+ export AWS_SECRET_ACCESS_KEY=${{ secrets.LITE_PREVIEWS_AWS_S3_SECRET_ACCESS_KEY }}
102
+ export AWS_DEFAULT_REGION=us-east-1
103
+ mkdir -p ./gradio-lite-files
104
+ tar -xzf ./gradio-lite-tar/gradio-lite-*.tgz -C ./gradio-lite-files
105
+ aws s3 cp ./gradio-lite-files/package/ s3://gradio-lite-previews/${{ needs.changes.outputs.sha }}/ --recursive
106
+ - name: Install Hub Client Library
107
+ run: |
108
+ python3 -m venv venv
109
+ . venv/bin/activate
110
+ pip install huggingface-hub==0.23.2
111
+ # temporary, but ensures the script cannot be modified in a PR
112
+ - name: Get deploy scripts
113
+ run: |
114
+ mkdir scripts
115
+ curl https://raw.githubusercontent.com/gradio-app/gradio/main/scripts/upload_demo_to_space.py -o scripts/upload_demo_to_space.py
116
+ curl https://raw.githubusercontent.com/gradio-app/gradio/main/scripts/upload_website_demos.py -o scripts/upload_website_demos.py
117
+ - name: make dirs
118
+ run: mkdir -p demo && mv all_demos/* demo/
119
+ - name: Upload demo to spaces
120
+ if: github.event.workflow_run.event == 'pull_request'
121
+ id: upload-demo
122
+ run: |
123
+ . venv/bin/activate
124
+ python scripts/upload_demo_to_space.py all_demos \
125
+ gradio-pr-deploys/pr-${{ needs.changes.outputs.pr_number }}-all-demos \
126
+ ${{ secrets.SPACES_DEPLOY_TOKEN }} \
127
+ --gradio-version ${{ needs.changes.outputs.gradio_version }} > url.txt
128
+ echo "SPACE_URL=$(tail -n 1 url.txt)" >> $GITHUB_OUTPUT
129
+ - name: Upload Website Demos
130
+ if: github.event.workflow_run.event == 'workflow_dispatch'
131
+ id: upload-website-demos
132
+ run: |
133
+ . venv/bin/activate
134
+ python scripts/upload_website_demos.py --AUTH_TOKEN ${{ secrets.WEBSITE_SPACES_DEPLOY_TOKEN }} \
135
+ --WHEEL_URL https://gradio-pypi-previews.s3.amazonaws.com/${{ needs.changes.outputs.sha }}/ \
136
+ --CLIENT_URL "gradio-client @ git+https://github.com/gradio-app/gradio@${{ needs.changes.outputs.sha }}#subdirectory=client/python" \
137
+ --GRADIO_VERSION ${{ needs.changes.outputs.gradio_version }}
138
+
139
+ comment-spaces-success:
140
+ uses: "./.github/workflows/comment-queue.yml"
141
+ needs: [deploy, changes]
142
+ if: needs.changes.outputs.should_run == 'true' && needs.deploy.result == 'success'
143
+ secrets:
144
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
145
+ with:
146
+ pr_number: ${{ needs.changes.outputs.pr_number }}
147
+ message: spaces~success~${{ needs.deploy.outputs.space_url }}
148
+ additional_text: |
149
+ **Install Gradio from this PR**
150
+ ```bash
151
+ pip install https://gradio-pypi-previews.s3.amazonaws.com/${{ needs.changes.outputs.sha }}/gradio-${{ needs.changes.outputs.gradio_version }}-py3-none-any.whl
152
+ ```
153
+
154
+ **Install Gradio Python Client from this PR**
155
+ ```bash
156
+ pip install "gradio-client @ git+https://github.com/gradio-app/gradio@${{ needs.changes.outputs.sha }}#subdirectory=client/python"
157
+ ```
158
+
159
+ **Install Gradio JS Client from this PR**
160
+ ```bash
161
+ npm install ${{ needs.deploy.outputs.js_tarball_url }}
162
+ ```
163
+
164
+ **Use Lite from this PR**
165
+ ```html
166
+ <script type="module" src="https://gradio-lite-previews.s3.amazonaws.com/${{ needs.changes.outputs.sha }}/dist/lite.js"></script>
167
+ ```
168
+ comment-spaces-failure:
169
+ uses: "./.github/workflows/comment-queue.yml"
170
+ needs: [deploy, changes]
171
+ if: always() && needs.deploy.result == 'failure' && needs.changes.outputs.should_run == 'true'
172
+ secrets:
173
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
174
+ with:
175
+ pr_number: ${{ needs.changes.outputs.pr_number }}
176
+ message: spaces~failure~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
.github/workflows/publish.yml ADDED
@@ -0,0 +1,80 @@
1
+ # safe runs from main
2
+
3
+ name: publish
4
+ on:
5
+ push:
6
+ branches:
7
+ - main
8
+ - 5.0-dev
9
+
10
+ env:
11
+ CI: true
12
+ PNPM_CACHE_FOLDER: .pnpm-store
13
+ NODE_OPTIONS: "--max-old-space-size=4096"
14
+ jobs:
15
+ version_or_publish:
16
+ runs-on: ubuntu-22.04
17
+ environment: publish
18
+ steps:
19
+ - name: checkout repo
20
+ uses: actions/checkout@v4
21
+ with:
22
+ fetch-depth: 0
23
+ persist-credentials: false
24
+ - name: install dependencies
25
+ uses: "gradio-app/gradio/.github/actions/install-all-deps@main"
26
+ with:
27
+ python_version: "3.10"
28
+ skip_build: "false"
29
+ - name: Build packages
30
+ run: |
31
+ . venv/bin/activate
32
+ pip install -U build hatch "packaging>=23.2" # packaging>=23.2 is needed to build Lite due to https://github.com/pypa/hatch/issues/1381
33
+ pnpm --filter @gradio/client --filter @gradio/lite --filter @gradio/preview build
34
+
35
+ - name: create and publish versions
36
+ id: changesets
37
+ uses: changesets/action@aba318e9165b45b7948c60273e0b72fce0a64eb9 # @v1
38
+ with:
39
+ version: pnpm ci:version
40
+ commit: "chore: update versions"
41
+ title: "chore: update versions"
42
+ publish: pnpm ci:publish
43
+ env:
44
+ NPM_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
45
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
46
+ GITHUB_TOKEN: ${{ secrets.GRADIO_PAT }}
47
+ - name: add label to skip chromatic build
48
+ if: ${{ steps.changesets.outputs.pullRequestNumber != '' && steps.changesets.outputs.pullRequestNumber != 'undefined' }}
49
+ run: gh pr edit "$PR_NUMBER" --add-label "no-visual-update"
50
+ env:
51
+ PR_NUMBER: ${{ steps.changesets.outputs.pullRequestNumber }}
52
+ GITHUB_TOKEN: ${{ secrets.GRADIO_PAT }}
53
+ - name: add label to run flaky tests
54
+ if: ${{ steps.changesets.outputs.pullRequestNumber != '' && steps.changesets.outputs.pullRequestNumber != 'undefined' }}
55
+ run: gh pr edit "$PR_NUMBER" --add-label "flaky-tests"
56
+ env:
57
+ PR_NUMBER: ${{ steps.changesets.outputs.pullRequestNumber }}
58
+ GITHUB_TOKEN: ${{ secrets.GRADIO_PAT }}
59
+ - name: add label to run backend tests on Windows
60
+ if: ${{ steps.changesets.outputs.pullRequestNumber != '' && steps.changesets.outputs.pullRequestNumber != 'undefined' }}
61
+ run: gh pr edit "$PR_NUMBER" --add-label "windows-tests"
62
+ env:
63
+ PR_NUMBER: ${{ steps.changesets.outputs.pullRequestNumber }}
64
+ GITHUB_TOKEN: ${{ secrets.GRADIO_PAT }}
65
+ - name: publish to pypi
66
+ if: steps.changesets.outputs.hasChangesets != 'true'
67
+ uses: "gradio-app/github/actions/publish-pypi@main"
68
+ env:
69
+ AWS_ACCESS_KEY_ID: ${{ secrets.SDK_AWS_S3_BUCKET_ACCESS_KEY }}
70
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.SDK_AWS_S3_BUCKET_SECRET_ACCESS_KEY }}
71
+ AWS_DEFAULT_REGION: us-west-2
72
+ with:
73
+ user: __token__
74
+ passwords: |
75
+ gradio:${{ secrets.PYPI_API_TOKEN }}
76
+ gradio_client:${{ secrets.PYPI_GRADIO_CLIENT_TOKEN }}
77
+ - name: trigger spaces deploy workflow
78
+ env:
79
+ GITHUB_TOKEN: ${{ secrets.COMMENT_TOKEN }}
80
+ run: gh workflow run previews-build.yml
.github/workflows/semgrep.yml ADDED
@@ -0,0 +1,68 @@
1
+ name: semgrep ci
2
+
3
+ on:
4
+ workflow_run:
5
+ workflows: ["trigger-semgrep"]
6
+ types:
7
+ - completed
8
+
9
+ env:
10
+ CI: true
11
+ PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "1"
12
+
13
+ concurrency:
14
+ group: "${{ github.event.workflow_run.head_repository.full_name }}-${{ github.event.workflow_run.head_branch }}-${{ github.workflow_ref }}"
15
+ cancel-in-progress: true
16
+
17
+ permissions: {}
18
+
19
+ jobs:
20
+ semgrep:
21
+ permissions:
22
+ contents: read
23
+ name: semgrep/ci
24
+ runs-on: ubuntu-latest
25
+ container:
26
+ image: semgrep/semgrep
27
+ options: --volume ${{ github.workspace }}/.github/configs:/mnt/ --name semgrepcontainer
28
+ outputs:
29
+ pr_number: ${{ steps.json.outputs.pr_number }}
30
+ sha: ${{ steps.json.outputs.sha }}
31
+ if: (github.actor != 'dependabot[bot]')
32
+ steps:
33
+ - name: Download artifact
34
+ uses: actions/download-artifact@v4
35
+ with:
36
+ name: changes
37
+ github-token: ${{ secrets.GITHUB_TOKEN }}
38
+ run-id: ${{ github.event.workflow_run.id }}
39
+ - uses: gradio-app/github/actions/json-to-output@main
40
+ id: json
41
+ with:
42
+ path: output.json
43
+ - uses: actions/checkout@v4
44
+ with:
45
+ repository: ${{ steps.json.outputs.source_repo }}
46
+ ref: ${{ steps.json.outputs.sha }}
47
+ - name: restart docker
48
+ uses: docker://docker
49
+ with:
50
+ args: docker restart semgrepcontainer
51
+ - run: ls -la /mnt
52
+ - run: semgrep ci --config=/mnt/semgrep_rules.yaml
53
+ update-status:
54
+ permissions:
55
+ actions: read
56
+ statuses: write
57
+ runs-on: ubuntu-latest
58
+ needs: semgrep
59
+ steps:
60
+ - name: update status
61
+ uses: gradio-app/github/actions/commit-status@main
62
+ with:
63
+ sha: ${{ needs.semgrep.outputs.sha }}
64
+ token: ${{ secrets.GITHUB_TOKEN }}
65
+ name: "Semgrep Results"
66
+ pr: ${{ needs.semgrep.outputs.pr_number }}
67
+ result: ${{ needs.semgrep.result == 'success' && 'success' || 'failure' }}
68
+ type: all