index.cjs

'use strict';
const promises = require('node:fs/promises');
const node_fs = require('node:fs');
const tar = require('tar');
const pathe = require('pathe');
const defu = require('defu');
const nypm = require('nypm');
const node_stream = require('node:stream');
const node_child_process = require('node:child_process');
const node_os = require('node:os');
const node_util = require('node:util');
const proxy = require('node-fetch-native/proxy');
// Download `url` to `filePath`. The remote etag is stored in a sidecar
// `<filePath>.json` file so an unchanged file is not downloaded again.
async function download(url, filePath, options = {}) {
  const infoPath = filePath + ".json";
  const info = JSON.parse(
    await promises.readFile(infoPath, "utf8").catch(() => "{}")
  );
  const headResponse = await sendFetch(url, {
    method: "HEAD",
    headers: options.headers
  }).catch(() => void 0);
  const etag = headResponse?.headers.get("etag");
  if (info.etag === etag && node_fs.existsSync(filePath)) {
    return;
  }
  if (typeof etag === "string") {
    info.etag = etag;
  }
  const response = await sendFetch(url, { headers: options.headers });
  if (response.status >= 400) {
    throw new Error(
      `Failed to download ${url}: ${response.status} ${response.statusText}`
    );
  }
  const stream = node_fs.createWriteStream(filePath);
  await node_util.promisify(node_stream.pipeline)(response.body, stream);
  await promises.writeFile(infoPath, JSON.stringify(info), "utf8");
}
const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w./-]+)?/;
// Parse "owner/repo[/subdir][#ref]" template inputs; ref defaults to "main".
function parseGitURI(input) {
  const m = input.match(inputRegex)?.groups || {};
  return {
    repo: m.repo,
    subdir: m.subdir || "/",
    ref: m.ref ? m.ref.slice(1) : "main"
  };
}
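// Illustrative example (not part of the original source): with the regex above,
// parseGitURI("unjs/template/packages/app#v1.0.0") yields
// { repo: "unjs/template", subdir: "/packages/app", ref: "v1.0.0" },
// while a bare "unjs/template" falls back to subdir "/" and ref "main".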
function debug(...args) {
  if (process.env.DEBUG) {
    console.debug("[giget]", ...args);
  }
}
// fetch wrapper: applies proxy support, lower-cases headers and optionally
// validates the response status.
async function sendFetch(url, options = {}) {
  if (options.headers?.["sec-fetch-mode"]) {
    options.mode = options.headers["sec-fetch-mode"];
  }
  const res = await proxy.fetch(url, {
    ...options,
    headers: normalizeHeaders(options.headers)
  }).catch((error) => {
    throw new Error(`Failed to download ${url}: ${error}`, { cause: error });
  });
  if (options.validateStatus && res.status >= 400) {
    throw new Error(`Failed to fetch ${url}: ${res.status} ${res.statusText}`);
  }
  return res;
}
// Cache root: $XDG_CACHE_HOME/giget or ~/.cache/giget.
function cacheDirectory() {
  return process.env.XDG_CACHE_HOME ? pathe.resolve(process.env.XDG_CACHE_HOME, "giget") : pathe.resolve(node_os.homedir(), ".cache/giget");
}
// Lower-case header names and drop empty values.
function normalizeHeaders(headers = {}) {
  const normalized = {};
  for (const [key, value] of Object.entries(headers)) {
    if (!value) {
      continue;
    }
    normalized[key.toLowerCase()] = value;
  }
  return normalized;
}
function currentShell() {
  if (process.env.SHELL) {
    return process.env.SHELL;
  }
  if (process.platform === "win32") {
    return "cmd.exe";
  }
  return "/bin/bash";
}
// Open an interactive shell in the given directory (experimental).
function startShell(cwd) {
  cwd = pathe.resolve(cwd);
  const shell = currentShell();
  console.info(
    `(experimental) Opening shell in ${pathe.relative(process.cwd(), cwd)}...`
  );
  node_child_process.spawnSync(shell, [], {
    cwd,
    shell: true,
    stdio: "inherit"
  });
}
// Generic http(s) provider: the input is either a JSON template manifest or a
// tarball URL. A HEAD request is used to detect JSON content and to derive a
// nicer name from the content-disposition header when possible.
const http = async (input, options) => {
  if (input.endsWith(".json")) {
    return await _httpJSON(input, options);
  }
  const url = new URL(input);
  let name = pathe.basename(url.pathname);
  try {
    const head = await sendFetch(url.href, {
      method: "HEAD",
      validateStatus: true,
      headers: {
        authorization: options.auth ? `Bearer ${options.auth}` : void 0
      }
    });
    const _contentType = head.headers.get("content-type") || "";
    if (_contentType.includes("application/json")) {
      return await _httpJSON(input, options);
    }
    const filename = head.headers.get("content-disposition")?.match(/filename="?(.+)"?/)?.[1];
    if (filename) {
      name = filename.split(".")[0];
    }
  } catch (error) {
    debug(`Failed to fetch HEAD for ${url.href}:`, error);
  }
  return {
    name: `${name}-${url.href.slice(0, 8)}`,
    version: "",
    subdir: "",
    tar: url.href,
    defaultDir: name,
    headers: {
      Authorization: options.auth ? `Bearer ${options.auth}` : void 0
    }
  };
};
// Fetch a JSON template manifest and validate its required fields.
const _httpJSON = async (input, options) => {
  const result = await sendFetch(input, {
    validateStatus: true,
    headers: {
      authorization: options.auth ? `Bearer ${options.auth}` : void 0
    }
  });
  const info = await result.json();
  if (!info.tar || !info.name) {
    throw new Error(
      `Invalid template info from ${input}. name or tar fields are missing!`
    );
  }
  return info;
};
// GitHub provider (API base can be overridden with GIGET_GITHUB_URL).
const github = (input, options) => {
  const parsed = parseGitURI(input);
  const githubAPIURL = process.env.GIGET_GITHUB_URL || "https://api.github.com";
  return {
    name: parsed.repo.replace("/", "-"),
    version: parsed.ref,
    subdir: parsed.subdir,
    headers: {
      Authorization: options.auth ? `Bearer ${options.auth}` : void 0,
      Accept: "application/vnd.github+json",
      "X-GitHub-Api-Version": "2022-11-28"
    },
    url: `${githubAPIURL.replace("api.github.com", "github.com")}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
    tar: `${githubAPIURL}/repos/${parsed.repo}/tarball/${parsed.ref}`
  };
};
// GitLab provider (base URL can be overridden with GIGET_GITLAB_URL).
const gitlab = (input, options) => {
  const parsed = parseGitURI(input);
  const gitlab2 = process.env.GIGET_GITLAB_URL || "https://gitlab.com";
  return {
    name: parsed.repo.replace("/", "-"),
    version: parsed.ref,
    subdir: parsed.subdir,
    headers: {
      authorization: options.auth ? `Bearer ${options.auth}` : void 0,
      // https://gitlab.com/gitlab-org/gitlab/-/commit/50c11f278d18fe1f3fb12eb595067216bb58ade2
      "sec-fetch-mode": "same-origin"
    },
    url: `${gitlab2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
    tar: `${gitlab2}/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
  };
};
// Bitbucket provider.
const bitbucket = (input, options) => {
  const parsed = parseGitURI(input);
  return {
    name: parsed.repo.replace("/", "-"),
    version: parsed.ref,
    subdir: parsed.subdir,
    headers: {
      authorization: options.auth ? `Bearer ${options.auth}` : void 0
    },
    url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
    tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
  };
};
// SourceHut provider.
const sourcehut = (input, options) => {
  const parsed = parseGitURI(input);
  return {
    name: parsed.repo.replace("/", "-"),
    version: parsed.ref,
    subdir: parsed.subdir,
    headers: {
      authorization: options.auth ? `Bearer ${options.auth}` : void 0
    },
    url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
    tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
  };
};
// Built-in providers keyed by the prefix before ":" in the template input.
const providers = {
  http,
  https: http,
  github,
  gh: github,
  gitlab,
  bitbucket,
  sourcehut
};
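// Illustrative mapping (repo names are placeholders, not from the original file):
// downloadTemplate below resolves the prefix before ":" against this table, so
// "github:unjs/template" and "gh:unjs/template#main" use the github provider,
// "gitlab:org/repo", "bitbucket:org/repo" and "sourcehut:user/repo" use theirs,
// and "https://example.com/archive.tar.gz" is handled by the http provider.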
const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
// Provider backed by a remote JSON registry of template manifests.
const registryProvider = (registryEndpoint = DEFAULT_REGISTRY, options = {}) => {
  return async (input) => {
    const start = Date.now();
    const registryURL = `${registryEndpoint}/${input}.json`;
    const result = await sendFetch(registryURL, {
      headers: {
        authorization: options.auth ? `Bearer ${options.auth}` : void 0
      }
    });
    if (result.status >= 400) {
      throw new Error(
        `Failed to download ${input} template info from ${registryURL}: ${result.status} ${result.statusText}`
      );
    }
    const info = await result.json();
    if (!info.tar || !info.name) {
      throw new Error(
        `Invalid template info from ${registryURL}. name or tar fields are missing!`
      );
    }
    debug(
      `Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`
    );
    return info;
  };
};
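// Sketch of the expected registry payload (an assumption inferred from the
// validation above, not taken from the original file): `${registryEndpoint}/${input}.json`
// should resolve to JSON with at least "name" and "tar" fields, for example
// { "name": "my-starter", "tar": "https://example.com/my-starter.tar.gz", "defaultDir": "my-starter" }.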
const sourceProtoRe = /^([\w-.]+):/;
// Resolve a provider for `input`, download (or reuse from cache) the template
// tarball and extract it into the target directory.
async function downloadTemplate(input, options = {}) {
  options = defu.defu(
    {
      registry: process.env.GIGET_REGISTRY,
      auth: process.env.GIGET_AUTH
    },
    options
  );
  const registry = options.registry === false ? void 0 : registryProvider(options.registry, { auth: options.auth });
  let providerName = options.provider || (registry ? "registry" : "github");
  let source = input;
  const sourceProviderMatch = input.match(sourceProtoRe);
  if (sourceProviderMatch) {
    providerName = sourceProviderMatch[1];
    source = input.slice(sourceProviderMatch[0].length);
    if (providerName === "http" || providerName === "https") {
      source = input;
    }
  }
  const provider = options.providers?.[providerName] || providers[providerName] || registry;
  if (!provider) {
    throw new Error(`Unsupported provider: ${providerName}`);
  }
  const template = await Promise.resolve().then(() => provider(source, { auth: options.auth })).catch((error) => {
    throw new Error(
      `Failed to download template from ${providerName}: ${error.message}`
    );
  });
  if (!template) {
    throw new Error(`Failed to resolve template from ${providerName}`);
  }
  // Sanitize names so they can be used as directory names.
  template.name = (template.name || "template").replace(/[^\da-z-]/gi, "-");
  template.defaultDir = (template.defaultDir || template.name).replace(
    /[^\da-z-]/gi,
    "-"
  );
  const temporaryDirectory = pathe.resolve(
    cacheDirectory(),
    providerName,
    template.name
  );
  const tarPath = pathe.resolve(
    temporaryDirectory,
    (template.version || template.name) + ".tar.gz"
  );
  if (options.preferOffline && node_fs.existsSync(tarPath)) {
    options.offline = true;
  }
  if (!options.offline) {
    await promises.mkdir(pathe.dirname(tarPath), { recursive: true });
    const s2 = Date.now();
    await download(template.tar, tarPath, {
      headers: {
        Authorization: options.auth ? `Bearer ${options.auth}` : void 0,
        ...normalizeHeaders(template.headers)
      }
    }).catch((error) => {
      if (!node_fs.existsSync(tarPath)) {
        throw error;
      }
      // Fall back to the cached tarball when the download fails.
      debug("Download error. Using cached version:", error);
      options.offline = true;
    });
    debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
  }
  if (!node_fs.existsSync(tarPath)) {
    throw new Error(
      `Tarball not found: ${tarPath} (offline: ${options.offline})`
    );
  }
  const cwd = pathe.resolve(options.cwd || ".");
  const extractPath = pathe.resolve(cwd, options.dir || template.defaultDir);
  if (options.forceClean) {
    await promises.rm(extractPath, { recursive: true, force: true });
  }
  if (!options.force && node_fs.existsSync(extractPath) && node_fs.readdirSync(extractPath).length > 0) {
    throw new Error(`Destination ${extractPath} already exists.`);
  }
  await promises.mkdir(extractPath, { recursive: true });
  const s = Date.now();
  const subdir = template.subdir?.replace(/^\//, "") || "";
  await tar.extract({
    file: tarPath,
    cwd: extractPath,
    onentry(entry) {
      // Strip the tarball's top-level directory and, when a subdir is
      // requested, keep only entries inside it.
      entry.path = entry.path.split("/").splice(1).join("/");
      if (subdir) {
        if (entry.path.startsWith(subdir + "/")) {
          entry.path = entry.path.slice(subdir.length);
        } else {
          entry.path = "";
        }
      }
    }
  });
  debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
  if (options.install) {
    debug("Installing dependencies...");
    await nypm.installDependencies({
      cwd: extractPath,
      silent: options.silent
    });
  }
  return {
    ...template,
    source,
    dir: extractPath
  };
}
exports.downloadTemplate = downloadTemplate;
exports.registryProvider = registryProvider;
exports.startShell = startShell;
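// Minimal usage sketch (not part of the original file; the template name and
// target directory are placeholders):
// const { downloadTemplate } = require("./index.cjs");
// downloadTemplate("github:unjs/template", { dir: "my-project", install: false })
//   .then((res) => console.log(`Extracted ${res.source} to ${res.dir}`));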