summary | refs | log | tree | commit | diff
path: root/packages/backend/src/core/AiService.ts
diff options
context:
space:
mode:
author: syuilo <Syuilotan@yahoo.co.jp> 2022-09-19 03:11:50 +0900
committer: syuilo <Syuilotan@yahoo.co.jp> 2022-09-19 03:11:50 +0900
commit: a2eac9fff67f811ed4ac1a80a88fd1f0eafae6c8 (patch)
tree: 9c7190e05fe0ffe085646cd194c6c65d47375f83 /packages/backend/src/core/AiService.ts
parent: revert (diff)
download: sharkey-a2eac9fff67f811ed4ac1a80a88fd1f0eafae6c8.tar.gz
          sharkey-a2eac9fff67f811ed4ac1a80a88fd1f0eafae6c8.tar.bz2
          sharkey-a2eac9fff67f811ed4ac1a80a88fd1f0eafae6c8.zip
test
Diffstat (limited to 'packages/backend/src/core/AiService.ts')
-rw-r--r--  packages/backend/src/core/AiService.ts  10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/packages/backend/src/core/AiService.ts b/packages/backend/src/core/AiService.ts
index 1cfc3382a9..e6102a1b91 100644
--- a/packages/backend/src/core/AiService.ts
+++ b/packages/backend/src/core/AiService.ts
@@ -15,7 +15,7 @@ let isSupportedCpu: undefined | boolean = undefined;
@Injectable()
export class AiService {
- #model: nsfw.NSFWJS;
+ private model: nsfw.NSFWJS;
constructor(
@Inject(DI.config)
@@ -26,7 +26,7 @@ export class AiService {
public async detectSensitive(path: string): Promise<nsfw.predictionType[] | null> {
try {
if (isSupportedCpu === undefined) {
- const cpuFlags = await this.#getCpuFlags();
+ const cpuFlags = await this.getCpuFlags();
isSupportedCpu = REQUIRED_CPU_FLAGS.every(required => cpuFlags.includes(required));
}
@@ -37,12 +37,12 @@ export class AiService {
const tf = await import('@tensorflow/tfjs-node');
- if (this.#model == null) this.#model = await nsfw.load(`file://${_dirname}/../../nsfw-model/`, { size: 299 });
+ if (this.model == null) this.model = await nsfw.load(`file://${_dirname}/../../nsfw-model/`, { size: 299 });
const buffer = await fs.promises.readFile(path);
const image = await tf.node.decodeImage(buffer, 3) as any;
try {
- const predictions = await this.#model.classify(image);
+ const predictions = await this.model.classify(image);
return predictions;
} finally {
image.dispose();
@@ -53,7 +53,7 @@ export class AiService {
}
}
- async #getCpuFlags(): Promise<string[]> {
+ private async getCpuFlags(): Promise<string[]> {
const str = await si.cpuFlags();
return str.split(/\s+/);
}