// index.mjs
import { readFile, writeFile, mkdir, rm } from 'node:fs/promises';
import { existsSync, createWriteStream, readdirSync } from 'node:fs';
import { extract } from 'tar';
import { resolve, relative, basename, dirname } from 'pathe';
import { defu } from 'defu';
import { installDependencies } from 'nypm';
import { pipeline } from 'node:stream';
import { spawnSync } from 'node:child_process';
import { homedir } from 'node:os';
import { promisify } from 'node:util';
import { fetch } from 'node-fetch-native/proxy';

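// Download `url` to `filePath`, storing the response ETag in a sidecar
// `<filePath>.json` file so an unchanged remote file is not re-downloaded.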
async function download(url, filePath, options = {}) {
  const infoPath = filePath + ".json";
  const info = JSON.parse(
    await readFile(infoPath, "utf8").catch(() => "{}")
  );
  const headResponse = await sendFetch(url, {
    method: "HEAD",
    headers: options.headers
  }).catch(() => void 0);
  const etag = headResponse?.headers.get("etag");
  if (info.etag === etag && existsSync(filePath)) {
    return;
  }
  if (typeof etag === "string") {
    info.etag = etag;
  }
  const response = await sendFetch(url, { headers: options.headers });
  if (response.status >= 400) {
    throw new Error(
      `Failed to download ${url}: ${response.status} ${response.statusText}`
    );
  }
  const stream = createWriteStream(filePath);
  await promisify(pipeline)(response.body, stream);
  await writeFile(infoPath, JSON.stringify(info), "utf8");
}

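// Parse an "owner/repo[/subdir][#ref]" source string into its parts.
// The ref defaults to "main" and the subdir to "/".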
const inputRegex = /^(?<repo>[\w.-]+\/[\w.-]+)(?<subdir>[^#]+)?(?<ref>#[\w./-]+)?/;
function parseGitURI(input) {
  const m = input.match(inputRegex)?.groups || {};
  return {
    repo: m.repo,
    subdir: m.subdir || "/",
    ref: m.ref ? m.ref.slice(1) : "main"
  };
}

function debug(...args) {
  if (process.env.DEBUG) {
    console.debug("[giget]", ...args);
  }
}

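// fetch wrapper: lower-cases headers, promotes a `sec-fetch-mode` header to the
// request mode, and throws on 4xx/5xx responses when `validateStatus` is set.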
async function sendFetch(url, options = {}) {
  if (options.headers?.["sec-fetch-mode"]) {
    options.mode = options.headers["sec-fetch-mode"];
  }
  const res = await fetch(url, {
    ...options,
    headers: normalizeHeaders(options.headers)
  }).catch((error) => {
    throw new Error(`Failed to download ${url}: ${error}`, { cause: error });
  });
  if (options.validateStatus && res.status >= 400) {
    throw new Error(`Failed to fetch ${url}: ${res.status} ${res.statusText}`);
  }
  return res;
}

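// Tarballs are cached under $XDG_CACHE_HOME/giget, falling back to ~/.cache/giget.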
function cacheDirectory() {
  return process.env.XDG_CACHE_HOME
    ? resolve(process.env.XDG_CACHE_HOME, "giget")
    : resolve(homedir(), ".cache/giget");
}

function normalizeHeaders(headers = {}) {
  const normalized = {};
  for (const [key, value] of Object.entries(headers)) {
    if (!value) {
      continue;
    }
    normalized[key.toLowerCase()] = value;
  }
  return normalized;
}

function currentShell() {
  if (process.env.SHELL) {
    return process.env.SHELL;
  }
  if (process.platform === "win32") {
    return "cmd.exe";
  }
  return "/bin/bash";
}

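// Spawn an interactive shell in the given directory (marked experimental).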
function startShell(cwd) {
  cwd = resolve(cwd);
  const shell = currentShell();
  console.info(
    `(experimental) Opening shell in ${relative(process.cwd(), cwd)}...`
  );
  spawnSync(shell, [], {
    cwd,
    shell: true,
    stdio: "inherit"
  });
}

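// Template providers. Each one maps a source string to template info
// ({ name, version, subdir, tar, headers, ... }) that downloadTemplate consumes.
// The http/https provider points directly at a tarball URL, or at a JSON
// template manifest when the URL ends in ".json" or serves application/json.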
const http = async (input, options) => {
  if (input.endsWith(".json")) {
    return await _httpJSON(input, options);
  }
  const url = new URL(input);
  let name = basename(url.pathname);
  try {
    const head = await sendFetch(url.href, {
      method: "HEAD",
      validateStatus: true,
      headers: {
        authorization: options.auth ? `Bearer ${options.auth}` : void 0
      }
    });
    const _contentType = head.headers.get("content-type") || "";
    if (_contentType.includes("application/json")) {
      return await _httpJSON(input, options);
    }
    const filename = head.headers.get("content-disposition")?.match(/filename="?(.+)"?/)?.[1];
    if (filename) {
      name = filename.split(".")[0];
    }
  } catch (error) {
    debug(`Failed to fetch HEAD for ${url.href}:`, error);
  }
  return {
    name: `${name}-${url.href.slice(0, 8)}`,
    version: "",
    subdir: "",
    tar: url.href,
    defaultDir: name,
    headers: {
      Authorization: options.auth ? `Bearer ${options.auth}` : void 0
    }
  };
};

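// Fetch and validate a JSON template manifest; it must provide `name` and `tar`.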
const _httpJSON = async (input, options) => {
  const result = await sendFetch(input, {
    validateStatus: true,
    headers: {
      authorization: options.auth ? `Bearer ${options.auth}` : void 0
    }
  });
  const info = await result.json();
  if (!info.tar || !info.name) {
    throw new Error(
      `Invalid template info from ${input}. name or tar fields are missing!`
    );
  }
  return info;
};

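// Git hosting providers (GitHub, GitLab, Bitbucket, SourceHut). Each resolves
// the archive (tarball) URL and a browsable URL for the requested repo and ref.
// GIGET_GITHUB_URL / GIGET_GITLAB_URL override the default host URLs.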
const github = (input, options) => {
  const parsed = parseGitURI(input);
  const githubAPIURL = process.env.GIGET_GITHUB_URL || "https://api.github.com";
  return {
    name: parsed.repo.replace("/", "-"),
    version: parsed.ref,
    subdir: parsed.subdir,
    headers: {
      Authorization: options.auth ? `Bearer ${options.auth}` : void 0,
      Accept: "application/vnd.github+json",
      "X-GitHub-Api-Version": "2022-11-28"
    },
    url: `${githubAPIURL.replace("api.github.com", "github.com")}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
    tar: `${githubAPIURL}/repos/${parsed.repo}/tarball/${parsed.ref}`
  };
};

const gitlab = (input, options) => {
  const parsed = parseGitURI(input);
  const gitlab2 = process.env.GIGET_GITLAB_URL || "https://gitlab.com";
  return {
    name: parsed.repo.replace("/", "-"),
    version: parsed.ref,
    subdir: parsed.subdir,
    headers: {
      authorization: options.auth ? `Bearer ${options.auth}` : void 0,
      // https://gitlab.com/gitlab-org/gitlab/-/commit/50c11f278d18fe1f3fb12eb595067216bb58ade2
      "sec-fetch-mode": "same-origin"
    },
    url: `${gitlab2}/${parsed.repo}/tree/${parsed.ref}${parsed.subdir}`,
    tar: `${gitlab2}/${parsed.repo}/-/archive/${parsed.ref}.tar.gz`
  };
};

const bitbucket = (input, options) => {
  const parsed = parseGitURI(input);
  return {
    name: parsed.repo.replace("/", "-"),
    version: parsed.ref,
    subdir: parsed.subdir,
    headers: {
      authorization: options.auth ? `Bearer ${options.auth}` : void 0
    },
    url: `https://bitbucket.com/${parsed.repo}/src/${parsed.ref}${parsed.subdir}`,
    tar: `https://bitbucket.org/${parsed.repo}/get/${parsed.ref}.tar.gz`
  };
};

const sourcehut = (input, options) => {
  const parsed = parseGitURI(input);
  return {
    name: parsed.repo.replace("/", "-"),
    version: parsed.ref,
    subdir: parsed.subdir,
    headers: {
      authorization: options.auth ? `Bearer ${options.auth}` : void 0
    },
    url: `https://git.sr.ht/~${parsed.repo}/tree/${parsed.ref}/item${parsed.subdir}`,
    tar: `https://git.sr.ht/~${parsed.repo}/archive/${parsed.ref}.tar.gz`
  };
};

const providers = {
  http,
  https: http,
  github,
  gh: github,
  gitlab,
  bitbucket,
  sourcehut
};

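// The registry provider resolves a bare template name to "<registry>/<name>.json",
// a JSON manifest hosted by default in the unjs/giget repository's templates directory.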
const DEFAULT_REGISTRY = "https://raw.githubusercontent.com/unjs/giget/main/templates";
const registryProvider = (registryEndpoint = DEFAULT_REGISTRY, options = {}) => {
  return async (input) => {
    const start = Date.now();
    const registryURL = `${registryEndpoint}/${input}.json`;
    const result = await sendFetch(registryURL, {
      headers: {
        authorization: options.auth ? `Bearer ${options.auth}` : void 0
      }
    });
    if (result.status >= 400) {
      throw new Error(
        `Failed to download ${input} template info from ${registryURL}: ${result.status} ${result.statusText}`
      );
    }
    const info = await result.json();
    if (!info.tar || !info.name) {
      throw new Error(
        `Invalid template info from ${registryURL}. name or tar fields are missing!`
      );
    }
    debug(
      `Fetched ${input} template info from ${registryURL} in ${Date.now() - start}ms`
    );
    return info;
  };
};

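// Resolve a template from a "<provider>:<source>" string (defaulting to the
// registry, or GitHub when the registry is disabled), download its tarball
// into the cache directory, and extract it into the target directory.
// Supports offline/preferOffline cache reuse, forceClean/force handling of an
// existing destination, and optional dependency installation via nypm.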
const sourceProtoRe = /^([\w-.]+):/;
async function downloadTemplate(input, options = {}) {
  options = defu(
    {
      registry: process.env.GIGET_REGISTRY,
      auth: process.env.GIGET_AUTH
    },
    options
  );
  const registry = options.registry === false ? void 0 : registryProvider(options.registry, { auth: options.auth });
  let providerName = options.provider || (registry ? "registry" : "github");
  let source = input;
  const sourceProviderMatch = input.match(sourceProtoRe);
  if (sourceProviderMatch) {
    providerName = sourceProviderMatch[1];
    source = input.slice(sourceProviderMatch[0].length);
    if (providerName === "http" || providerName === "https") {
      source = input;
    }
  }
  const provider = options.providers?.[providerName] || providers[providerName] || registry;
  if (!provider) {
    throw new Error(`Unsupported provider: ${providerName}`);
  }
  const template = await Promise.resolve().then(() => provider(source, { auth: options.auth })).catch((error) => {
    throw new Error(
      `Failed to download template from ${providerName}: ${error.message}`
    );
  });
  if (!template) {
    throw new Error(`Failed to resolve template from ${providerName}`);
  }
  // Normalize template and directory names to file-system-safe names.
  template.name = (template.name || "template").replace(/[^\da-z-]/gi, "-");
  template.defaultDir = (template.defaultDir || template.name).replace(
    /[^\da-z-]/gi,
    "-"
  );
  const temporaryDirectory = resolve(
    cacheDirectory(),
    providerName,
    template.name
  );
  const tarPath = resolve(
    temporaryDirectory,
    (template.version || template.name) + ".tar.gz"
  );
  if (options.preferOffline && existsSync(tarPath)) {
    options.offline = true;
  }
  if (!options.offline) {
    await mkdir(dirname(tarPath), { recursive: true });
    const s2 = Date.now();
    await download(template.tar, tarPath, {
      headers: {
        Authorization: options.auth ? `Bearer ${options.auth}` : void 0,
        ...normalizeHeaders(template.headers)
      }
    }).catch((error) => {
      // Fall back to a previously cached tarball when the download fails.
      if (!existsSync(tarPath)) {
        throw error;
      }
      debug("Download error. Using cached version:", error);
      options.offline = true;
    });
    debug(`Downloaded ${template.tar} to ${tarPath} in ${Date.now() - s2}ms`);
  }
  if (!existsSync(tarPath)) {
    throw new Error(
      `Tarball not found: ${tarPath} (offline: ${options.offline})`
    );
  }
  const cwd = resolve(options.cwd || ".");
  const extractPath = resolve(cwd, options.dir || template.defaultDir);
  if (options.forceClean) {
    await rm(extractPath, { recursive: true, force: true });
  }
  if (!options.force && existsSync(extractPath) && readdirSync(extractPath).length > 0) {
    throw new Error(`Destination ${extractPath} already exists.`);
  }
  await mkdir(extractPath, { recursive: true });
  const s = Date.now();
  const subdir = template.subdir?.replace(/^\//, "") || "";
  await extract({
    file: tarPath,
    cwd: extractPath,
    onentry(entry) {
      // Strip the archive's top-level directory; keep only the requested subdir.
      entry.path = entry.path.split("/").splice(1).join("/");
      if (subdir) {
        if (entry.path.startsWith(subdir + "/")) {
          entry.path = entry.path.slice(subdir.length);
        } else {
          entry.path = "";
        }
      }
    }
  });
  debug(`Extracted to ${extractPath} in ${Date.now() - s}ms`);
  if (options.install) {
    debug("Installing dependencies...");
    await installDependencies({
      cwd: extractPath,
      silent: options.silent
    });
  }
  return {
    ...template,
    source,
    dir: extractPath
  };
}

export { downloadTemplate, registryProvider, startShell };
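
// Usage sketch (not part of this module): the source string and target
// directory below are illustrative.
//
//   import { downloadTemplate } from "giget";
//   const { dir } = await downloadTemplate("gh:unjs/template", {
//     dir: "my-project",
//     install: true
//   });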