Vítor Vasconcellos 48afea5a4b [ENG-1184, ENG-1286, ENG-1330] Rework native dependencies (+ deb fixes) (#1685)
* Almost working

* Downgrade libplacebo
 - FFmpeg 6.0 uses some deprecated functions that have since been removed

* Use -Oz for zimg

* Fix CI script to run the new ffmpeg build script

* Fix heif step name + Ignore docker cache while building in CI

* Fix OpenCL build on linux

* Fix adding incorrect -target argument to linker
 - Update zig for windows target

* Disable OpenGL for ffmpeg; it is only used as an outdev, not for processing
 - Disable OpenGL and DirectX for libplacebo; ffmpeg only supports Vulkan when using it
 - Add WIN32_LEAN_AND_MEAN to the global CFLAGS to trim what the Windows API headers pull in
 - Fix an incorrect bsdtar flag in 99-heif.sh

* Remove WIN32_LEAN_AND_MEAN from global CFLAGS, as it was breaking the OpenCL build
 - Fix Dockerfile step for cleaning up the out dir
 - Improve licensing handling

* x86_64 windows and linux builds are working

* Fix aarch64 build for windows and linux

* Fix symbol visibility in linux builds
 - Fix soxr failing to download due to SourceForge
 - Only patch zimg on windows targets
 - Tell cmake to hide libheif symbols

* Fix Linux .so rpath
 - Add lzo dependency
 - Publish source for the built libs
 - Add warning for missing nasm in tauri.mjs
 - Remove ffmpeg install from setup.sh
 - Add download logic for our linux ffmpeg bundle in preprep.mjs

* Remove jobs; docker doesn't support this

* Fix typing

* Change ffmpeg references to native deps
 - Rename FFMpeg.framework to Spacedrive.framework
 - Centralize the macOS native deps build with the windows and linux one
 - Change the preprep script to only download our native deps
 - Remove old macOS ffmpeg build scripts

* Compress native deps before creating the GitHub artifact
 - The zip implementation for GitHub artifacts does not maintain symlinks and permissions (see the sketch after this item)
 - Remove conditional protoc; it is now always included
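
As a side note on the symlink point above, a minimal sketch of the idea (the compressNativeDeps helper, paths, and file names are assumptions, not the repo's actual CI step): wrap the output directory in a tarball before handing it to the artifact upload, since tar keeps symlinks and permissions that the artifact zip discards.

// Illustrative sketch (not the actual CI step): tar the native-deps output before
// uploading it as a GitHub artifact. Paths and names below are assumptions.
import { execFile } from 'node:child_process'
import { promisify } from 'node:util'

const exec = promisify(execFile)

export async function compressNativeDeps(nativeDepsDir, outFile = 'native-deps.tar.xz') {
	// -C keeps archive paths relative to the deps directory; -J selects xz compression.
	await exec('tar', ['-cJf', outFile, '-C', nativeDepsDir, '.'])
	return outFile
}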

* Don't strip dylibs; it was breaking them
 - Only download macOS Framework for darwin targets
 - Fix preprep script
 - Improve README.md for native-deps
 - Fix not finding native-deps src

* Attempt to fix macOS dylib

* Fix macOS dylibs
 - Replace lld.ld64 with apple's own linker
 - Add stages for building apple's compiler tools to use instead of LLVM ones

* Ensure sourced file exists

* All targets should build now
 - Fix environment sourcing in build.sh
 - Some minor improvements to cc.sh
 - Fix incorrect flag in zlib.sh
 - Improve how -f[...] flags are passed to compiler and linker
 - Add more stack hardening flags

* We can now support macOS 11.0 on arm64

* Improve macOS Framework generation
 - Remove installed unused deps
 - Improve cleanup and organization logic in Dockerfile last step
 - Move libav* .dll.a to .lib to fix missing files in windows target
 - Remove apple tools from /srv folder after installation to prevent their files from being copied by other stage steps
 - Create all the necessary symlinks for the macOS targets while building
 - Remove symlink logic for macOS target from preprep.mjs

* Remove native-deps from spacedrive repo
 - It now resides in https://github.com/spacedriveapp/native-deps
 - Modify preprep script to download native-deps from the new location
 - Remove GitHub API code from scripts (not needed anymore)
 - Add flock.mjs to allow running tauri.mjs cleanup as soon as cargo finishes building on linux

* Handle flock not being present on the system (see the sketch after this item)
 - Allow macOS to try using flock
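
A minimal sketch of the fallback described above (the real flock.mjs may differ; the lock-file argument and command handling here are assumptions): try to wrap the command with flock(1), and run it directly when the utility is not available.

// Hypothetical sketch of a flock wrapper with a fallback when flock(1) is missing.
// The lock-file argument and CLI shape are assumptions, not the real flock.mjs.
import { spawn } from 'node:child_process'
import { argv, exit } from 'node:process'

const [lockFile, cmd, ...args] = argv.slice(2)

/** Run a command, resolving with its exit code, rejecting if it cannot be spawned. */
function run(command, commandArgs) {
	return new Promise((resolve, reject) => {
		const child = spawn(command, commandArgs, { stdio: 'inherit' })
		child.once('error', reject)
		child.once('exit', code => resolve(code ?? 1))
	})
}

let exitCode
try {
	// Hold the lock for the duration of the wrapped command.
	exitCode = await run('flock', [lockFile, cmd, ...args])
} catch {
	// flock(1) is not installed (possible on macOS): run the command unguarded.
	exitCode = await run(cmd, args)
}
exit(exitCode)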

* Fix preprep on macOS

* Add a script that patches the deb to fix errors and warnings raised by lintian

* Fix ctrl+c/ctrl+v typo

* Remove gstreamer1.0-gtk3 from deb dependencies

* eval is evil

* Handle tauri build release with an explicit target in fix-deb.sh

* Preserve environment variables when re-executing fix-deb with sudo

* Only execute fix-deb.sh when building a deb bundle

* Improvements to fix-deb.sh

* Improve setup.sh (add experimental Alpine support)
2023-11-17 19:20:14 +00:00


import * as fs from 'node:fs/promises'
import { dirname, join as joinPath } from 'node:path'
import { env } from 'node:process'
import { fileURLToPath } from 'node:url'
import { fetch, Headers } from 'undici'

const __debug = env.NODE_ENV === 'debug'
const __offline = env.OFFLINE === 'true'

const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)

const cacheDir = joinPath(__dirname, '.tmp')
await fs.mkdir(cacheDir, { recursive: true, mode: 0o751 })

/**
 * @param {string} resource
 * @param {Headers} [headers]
 * @returns {Promise<null | {data: Buffer, header: [string, string] | undefined}>}
 */
async function getCache(resource, headers) {
	/** @type {Buffer | undefined} */
	let data
	/** @type {[string, string] | undefined} */
	let header

	// Don't cache in CI
	if (env.CI === 'true') return null

	if (headers)
		resource += Array.from(headers.entries())
			.filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since')
			.flat()
			.join(':')

	try {
		const cache = JSON.parse(
			await fs.readFile(joinPath(cacheDir, Buffer.from(resource).toString('base64url')), {
				encoding: 'utf8',
			})
		)
		if (cache && typeof cache === 'object') {
			if (cache.etag && typeof cache.etag === 'string') {
				header = ['If-None-Match', cache.etag]
			} else if (cache.modifiedSince && typeof cache.modifiedSince === 'string') {
				header = ['If-Modified-Since', cache.modifiedSince]
			}
			if (cache.data && typeof cache.data === 'string')
				data = Buffer.from(cache.data, 'base64')
		}
	} catch (error) {
		if (__debug) {
			console.warn(`CACHE MISS: ${resource}`)
			console.error(error)
		}
	}

	return data ? { data, header } : null
}

/**
 * @param {import('undici').Response} response
 * @param {string} resource
 * @param {Buffer} [cachedData]
 * @param {Headers} [headers]
 * @returns {Promise<Buffer>}
 */
async function setCache(response, resource, cachedData, headers) {
	const data = Buffer.from(await response.arrayBuffer())

	// Don't cache in CI
	if (env.CI === 'true') return data

	const etag = response.headers.get('ETag') || undefined
	const modifiedSince = response.headers.get('Last-Modified') || undefined

	if (headers)
		resource += Array.from(headers.entries())
			.filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since')
			.flat()
			.join(':')

	if (response.status === 304 || (response.ok && data.length === 0)) {
		// Cache hit
		if (!cachedData) throw new Error('Empty cache hit ????')
		return cachedData
	}

	try {
		await fs.writeFile(
			joinPath(cacheDir, Buffer.from(resource).toString('base64url')),
			JSON.stringify({
				etag,
				modifiedSince,
				data: data.toString('base64'),
			}),
			{ mode: 0o640, flag: 'w+' }
		)
	} catch (error) {
		if (__debug) {
			console.warn(`CACHE WRITE FAIL: ${resource}`)
			console.error(error)
		}
	}

	return data
}

/**
 * @param {URL | string} resource
 * @param {Headers?} [headers]
 * @param {boolean} [preferCache]
 * @returns {Promise<Buffer>}
 */
export async function get(resource, headers, preferCache) {
	if (headers == null) headers = new Headers()
	if (resource instanceof URL) resource = resource.toString()

	const cache = await getCache(resource, headers)

	if (__offline) {
		if (cache?.data == null)
			throw new Error(`OFFLINE MODE: Cache for request ${resource} doesn't exist`)
		return cache.data
	}

	if (preferCache && cache?.data != null) return cache.data

	if (cache?.header) headers.append(...cache.header)

	const response = await fetch(resource, { headers })
	if (!response.ok) {
		if (cache?.data) {
			if (__debug) console.warn(`CACHE HIT due to fail: ${resource} ${response.statusText}`)
			return cache.data
		}
		throw new Error(response.statusText)
	}

	return await setCache(response, resource, cache?.data, headers)
}
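
For reference, a usage sketch of this caching helper, roughly how a preprep download step could call it (the module path, release URL, and file names below are placeholders, not the repository's actual coordinates):

// Illustrative usage of the caching `get` helper; URL and paths are placeholders.
import * as fs from 'node:fs/promises'

import { get } from './fetch.mjs' // assumed module path for the file above

const archive = await get(
	'https://github.com/spacedriveapp/native-deps/releases/latest/download/native-deps-x86_64-linux-gnu.tar.xz',
	null, // no extra request headers
	true // prefer the on-disk cache when an entry exists
)
await fs.writeFile('native-deps.tar.xz', archive)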