add whisper download utils. add react-navigator.

This commit is contained in:
Jordan
2025-02-16 19:55:26 -08:00
parent 081ac367ba
commit bc3d481d25
11 changed files with 682 additions and 85 deletions


@@ -7,12 +7,22 @@ export const MIGRATE_UP = {
libretranslate_base_url TEXT,
ui_direction INTEGER
)`,
],
2: [
`CREATE TABLE IF NOT EXISTS whisper_models (
model TEXT PRIMARY KEY,
bytes_done INTEGER,
bytes_total INTEGER
)`,
]
}
export const MIGRATE_DOWN = {
1: [
`DROP TABLE IF EXISTS settings`
],
2: [
`DROP TABLE IF EXISTS whisper_models`
]
}
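
The migration maps are keyed by schema version, each holding the SQL statements for that step. A minimal sketch of how such a map could be applied at startup (the actual runner lives in `db.ts`, which is not part of this diff; the `./migrations` import path, `applyMigrations`, and the `PRAGMA user_version` bookkeeping are assumptions):

```ts
import { SQLiteDatabase } from "expo-sqlite";
import { MIGRATE_UP } from "./migrations";

// Hypothetical runner: apply every migration above the database's
// current schema version, in ascending order.
export async function applyMigrations(db: SQLiteDatabase) {
  const row = await db.getFirstAsync<{ user_version: number }>(
    "PRAGMA user_version"
  );
  const current = row?.user_version ?? 0;
  const pending = Object.entries(MIGRATE_UP)
    .map(([version, statements]) => [Number(version), statements] as const)
    .filter(([version]) => version > current)
    .sort(([a], [b]) => a - b);
  for (const [version, statements] of pending) {
    for (const statement of statements) {
      await db.execAsync(statement);
    }
    // Record the new schema version once this step has been applied.
    await db.execAsync(`PRAGMA user_version = ${version}`);
  }
}
```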


@@ -1,4 +1,6 @@
import { SQLiteDatabase } from "expo-sqlite";
import * as FileSystem from "expo-file-system";
import { getDb } from "./db";
export class Settings {
@@ -6,6 +8,7 @@ export class Settings {
"host_language",
"libretranslate_base_url",
"ui_direction",
"whisper_model",
]
constructor(public db: SQLiteDatabase) {
@@ -53,4 +56,16 @@ LIMIT 1`
return await this.getValue("libretranslate_base_url")
}
async setWhisperModel(value: string) {
await this.setValue("whisper_model", value);
}
async getWhisperModel() {
return await this.getValue("whisper_model");
}
static async getDefault() {
return new Settings(await getDb())
}
}
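
A brief usage sketch for the new whisper model setting (the `./settings` import path and the `"small"` value are assumptions for illustration):

```ts
import { Settings } from "./settings";

// Hypothetical call site: persist the chosen whisper model,
// then read it back later (e.g. on the next launch).
async function chooseWhisperModel() {
  const settings = await Settings.getDefault();
  await settings.setWhisperModel("small");
  return settings.getWhisperModel(); // resolves to "small"
}
```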

app/lib/whisper.ts Normal file

@@ -0,0 +1,190 @@
import { Platform } from "react-native";
import * as FileSystem from "expo-file-system";
import { File, Directory, Paths } from "expo-file-system/next";
import { shareAsync } from "expo-sharing";
import { getDb } from "./db";
// Store downloaded models under the app's writable document directory;
// the bundle directory is read-only on device.
export const WHISPER_MODEL_PATH = Paths.join(FileSystem.documentDirectory || "file:///", "whisper");
export const WHISPER_MODEL_DIR = new Directory(WHISPER_MODEL_PATH);
// Thanks to https://medium.com/@fabi.mofar/downloading-and-saving-files-in-react-native-expo-5b3499adda84
export async function saveFile(
uri: string,
filename: string,
mimetype: string
) {
if (Platform.OS === "android") {
const permissions =
await FileSystem.StorageAccessFramework.requestDirectoryPermissionsAsync();
if (permissions.granted) {
const base64 = await FileSystem.readAsStringAsync(uri, {
encoding: FileSystem.EncodingType.Base64,
});
await FileSystem.StorageAccessFramework.createFileAsync(
permissions.directoryUri,
filename,
mimetype
)
.then(async (uri) => {
await FileSystem.writeAsStringAsync(uri, base64, {
encoding: FileSystem.EncodingType.Base64,
});
})
.catch((e) => console.log(e));
} else {
shareAsync(uri);
}
} else {
shareAsync(uri);
}
}
export const WHISPER_MODEL_TAGS = ["small", "medium", "large"] as const;
export type whisper_model_tag_t = (typeof WHISPER_MODEL_TAGS)[number];
export const WHISPER_MODELS = {
small: {
source:
"https://huggingface.co/openai/whisper-small/resolve/main/pytorch_model.bin",
target: "small.bin",
label: "Small",
},
medium: {
source:
"https://huggingface.co/openai/whisper-medium/resolve/main/pytorch_model.bin",
target: "medium.bin",
label: "Medium",
},
large: {
source:
"https://huggingface.co/openai/whisper-large/resolve/main/pytorch_model.bin",
target: "large.bin",
label: "Large",
},
} as Record<whisper_model_tag_t, { source: string; target: string; label: string }>;
export function getWhisperTarget(key: whisper_model_tag_t) {
const path = Paths.join(WHISPER_MODEL_PATH, WHISPER_MODELS[key].target);
return new File(path);
}
export type download_status =
| {
status: "not_started" | "complete";
}
| {
status: "in_progress";
bytes: {
total: number;
done: number;
};
};
export async function getModelFileSize(whisper_model: whisper_model_tag_t) {
const target = getWhisperTarget(whisper_model)
if (!target.exists) return undefined;
return target.size;
}
/**
*
* @param whisper_model The whisper model key to check (e.g. `"small"`)
* @returns The persisted download status: not started, in progress (with byte counts), or complete.
*/
export async function getWhisperDownloadStatus(
whisper_model: whisper_model_tag_t
): Promise<download_status> {
// const files = await FileSystem.readDirectoryAsync("file:///whisper");
const db = await getDb();
const result = (await db.getFirstAsync(
`
SELECT bytes_done, bytes_total FROM whisper_models WHERE model = ?
`,
[whisper_model]
)) as { bytes_done: number; bytes_total: number } | null;
if (!result)
return {
status: "not_started",
};
if (result.bytes_done < result.bytes_total)
return {
status: "in_progress",
bytes: {
done: result.bytes_done,
total: result.bytes_total,
},
};
return {
status: "complete",
};
}
export function whisperFileExists(whisper_model: whisper_model_tag_t) {
const target = getWhisperTarget(whisper_model);
return target.exists;
}
export async function downloadWhisperModel(
whisper_model: whisper_model_tag_t,
options: {
force_redownload: boolean;
} = {
force_redownload: false,
}
) {
if (!WHISPER_MODEL_DIR.exists) {
await FileSystem.makeDirectoryAsync(WHISPER_MODEL_PATH, {
intermediates: true,
})
}
const whisperTarget = getWhisperTarget(whisper_model);
// If the target file already exists, delete and re-download it only when forced.
if (whisperTarget.exists) {
if (options.force_redownload) {
whisperTarget.delete()
} else {
console.warn("Whisper model for %s already exists", whisper_model);
return;
}
}
// Initiate a new resumable download.
const spec = WHISPER_MODELS[whisper_model];
const resumable = FileSystem.createDownloadResumable(
spec.source,
whisperTarget.uri,
undefined,
// On each progress event, persist the whisper model download status.
// The createDownloadResumable progress callback only fires while the app is in
// the foreground, so a background process will also be updating the byte counts.
async (data) => {
const db = await getDb();
const args = [
whisper_model,
data.totalBytesWritten,
data.totalBytesExpectedToWrite,
];
await db.runAsync(
`INSERT OR REPLACE INTO whisper_models (model, bytes_done, bytes_total) VALUES (?, ?, ?)`,
args
);
}
);
await resumable.downloadAsync();
}
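
A sketch of how these utilities could be wired together from a settings screen (the call site, import path, and polling interval are assumptions; progress is read back from the database rather than from the in-memory callback, so it also reflects writes made by a background task):

```ts
import {
  downloadWhisperModel,
  getWhisperDownloadStatus,
  type whisper_model_tag_t,
} from "./whisper";

// Hypothetical call site: start a download and poll the persisted status.
async function downloadWithProgress(model: whisper_model_tag_t) {
  const download = downloadWhisperModel(model);
  const timer = setInterval(async () => {
    const status = await getWhisperDownloadStatus(model);
    if (status.status === "in_progress") {
      console.log(`${model}: ${status.bytes.done}/${status.bytes.total} bytes`);
    }
  }, 1000);
  try {
    await download;
  } finally {
    clearInterval(timer);
  }
  console.log(`${model}:`, (await getWhisperDownloadStatus(model)).status);
}
```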