Added first source and embed
This commit is contained in:
parent
c55f830c30
commit
ffe5e4bb4f
File diff suppressed because it is too large
Load Diff
|
@ -38,6 +38,7 @@
|
|||
"prepublishOnly": "npm test && npm run lint"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
"@typescript-eslint/eslint-plugin": "^5.60.0",
|
||||
"@typescript-eslint/parser": "^5.60.0",
|
||||
"eslint": "^8.30.0",
|
||||
|
@ -53,5 +54,10 @@
|
|||
"vite-plugin-dts": "^2.3.0",
|
||||
"vite-plugin-eslint": "^1.8.1",
|
||||
"vitest": "^0.32.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
"crypto-js": "^4.1.1",
|
||||
"form-data": "^4.0.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,8 +1,5 @@
|
|||
import { describe, expect, it } from 'vitest';
|
||||
import { LOG } from '@/testing/oof';
|
||||
|
||||
// Smoke test: verifies the LOG fixture exported by the testing module.
describe('oof.ts', () => {
  it('should contain hello', () => {
    // The fixture value itself must include the greeting.
    expect(LOG).toContain('hello');
    // Control assertion on a literal — sanity-checks the matcher.
    expect('hello').toContain('hello');
  });
});
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import * as FormData from 'form-data';
|
||||
|
||||
export type FetcherOptions = {
|
||||
baseUrl?: string;
|
||||
headers?: Record<string, string>;
|
||||
|
@ -15,10 +17,10 @@ export type DefaultedFetcherOptions = {
|
|||
};
|
||||
|
||||
export type Fetcher<T = any> = {
|
||||
(url: string, ops: DefaultedFetcherOptions): T;
|
||||
(url: string, ops: DefaultedFetcherOptions): Promise<T>;
|
||||
};
|
||||
|
||||
// this feature has some quality of life features
|
||||
export type UseableFetcher<T = any> = {
|
||||
(url: string, ops?: FetcherOptions): T;
|
||||
(url: string, ops?: FetcherOptions): Promise<T>;
|
||||
};
|
||||
|
|
|
@ -1,14 +1,16 @@
|
|||
import { Embed, Sourcerer } from '@/providers/base';
|
||||
import { upcloudScraper } from '@/providers/embeds/upcloud';
|
||||
import { flixhqScraper } from '@/providers/sources/flixhq/index';
|
||||
import { hasDuplicates, isNotNull } from '@/utils/predicates';
|
||||
|
||||
function gatherAllSources(): Array<Sourcerer | null> {
|
||||
// all sources are gathered here
|
||||
return [];
|
||||
return [flixhqScraper];
|
||||
}
|
||||
|
||||
function gatherAllEmbeds(): Array<Embed | null> {
|
||||
// all embeds are gathered here
|
||||
return [];
|
||||
return [upcloudScraper];
|
||||
}
|
||||
|
||||
export interface ProviderList {
|
||||
|
|
|
@ -0,0 +1,73 @@
|
|||
import { AES, enc } from 'crypto-js';
|
||||
|
||||
import { makeEmbed } from '@/providers/base';
|
||||
|
||||
// Response shape of UpCloud's `/ajax/embed-4/getSources` endpoint.
interface StreamRes {
  server: number;
  // Either a plain JSON array of sources, or an AES-encrypted string —
  // the scraper probes with isJSON() and decrypts when needed.
  sources: string;
  // Side-channel tracks (subtitles / preview thumbnails) for the stream.
  tracks: {
    file: string;
    kind: 'captions' | 'thumbnails';
    label: string;
  }[];
}
|
||||
|
||||
function isJSON(json: string) {
|
||||
try {
|
||||
JSON.parse(json);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export const upcloudScraper = makeEmbed({
|
||||
id: 'upcloud',
|
||||
name: 'UpCloud',
|
||||
rank: 200,
|
||||
async scrape(ctx) {
|
||||
// Example url: https://dokicloud.one/embed-4/{id}?z=
|
||||
const parsedUrl = new URL(ctx.url.replace('embed-5', 'embed-4'));
|
||||
|
||||
const dataPath = parsedUrl.pathname.split('/');
|
||||
const dataId = dataPath[dataPath.length - 1];
|
||||
|
||||
const streamRes = await ctx.proxiedFetcher<StreamRes>(`${parsedUrl.origin}/ajax/embed-4/getSources?id=${dataId}`, {
|
||||
headers: {
|
||||
Referer: parsedUrl.origin,
|
||||
'X-Requested-With': 'XMLHttpRequest',
|
||||
},
|
||||
});
|
||||
|
||||
let sources: { file: string; type: string } | null = null;
|
||||
|
||||
if (!isJSON(streamRes.sources)) {
|
||||
const decryptionKey = JSON.parse(
|
||||
await ctx.proxiedFetcher<string>(`https://raw.githubusercontent.com/enimax-anime/key/e4/key.txt`),
|
||||
) as [number, number][];
|
||||
|
||||
let extractedKey = '';
|
||||
const sourcesArray = streamRes.sources.split('');
|
||||
for (const index of decryptionKey) {
|
||||
for (let i: number = index[0]; i < index[1]; i += 1) {
|
||||
extractedKey += streamRes.sources[i];
|
||||
sourcesArray[i] = '';
|
||||
}
|
||||
}
|
||||
|
||||
const decryptedStream = AES.decrypt(sourcesArray.join(''), extractedKey).toString(enc.Utf8);
|
||||
const parsedStream = JSON.parse(decryptedStream)[0];
|
||||
if (!parsedStream) throw new Error('No stream found');
|
||||
sources = parsedStream;
|
||||
}
|
||||
|
||||
if (!sources) throw new Error('upcloud source not found');
|
||||
|
||||
return {
|
||||
stream: {
|
||||
type: 'hls',
|
||||
playlist: sources.file,
|
||||
},
|
||||
};
|
||||
},
|
||||
});
|
|
@ -1,28 +0,0 @@
|
|||
import { makeSourcerer } from '@/providers/base';
|
||||
|
||||
export const flixHq = makeSourcerer({
|
||||
id: 'flixhq',
|
||||
name: 'FlixHQ',
|
||||
rank: 500,
|
||||
|
||||
async scrapeShow(_input) {
|
||||
return {
|
||||
embeds: [],
|
||||
stream: {
|
||||
type: 'file',
|
||||
qualities: {
|
||||
'360': {
|
||||
type: 'mp4',
|
||||
url: 'blabal.mp4',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
async scrapeMovie(_input) {
|
||||
return {
|
||||
embeds: [],
|
||||
};
|
||||
},
|
||||
});
|
|
@ -0,0 +1 @@
|
|||
// Base URL of the FlixHQ site; relative scraper paths resolve against it.
export const flixHqBase = 'https://flixhq.to';
|
|
@ -0,0 +1,28 @@
|
|||
import { makeSourcerer } from '@/providers/base';
|
||||
import { getFlixhqSourceDetails, getFlixhqSources } from '@/providers/sources/flixhq/scrape';
|
||||
import { getFlixhqId } from '@/providers/sources/flixhq/search';
|
||||
import { NotFoundError } from '@/utils/errors';
|
||||
|
||||
// TODO tv shows are available in flixHQ, just no scraper yet
|
||||
export const flixhqScraper = makeSourcerer({
|
||||
id: 'flixhq',
|
||||
name: 'FlixHQ',
|
||||
rank: 100,
|
||||
async scrapeMovie(ctx) {
|
||||
const id = await getFlixhqId(ctx, ctx.media);
|
||||
if (!id) throw new NotFoundError();
|
||||
|
||||
const sources = await getFlixhqSources(ctx, id);
|
||||
const upcloudStream = sources.find((v) => v.embed.toLowerCase() === 'upcloud');
|
||||
if (!upcloudStream) throw new NotFoundError('upcloud stream not found for flixhq');
|
||||
|
||||
return {
|
||||
embeds: [
|
||||
{
|
||||
embedId: '', // TODO embed id
|
||||
url: await getFlixhqSourceDetails(ctx, upcloudStream.episodeId),
|
||||
},
|
||||
],
|
||||
};
|
||||
},
|
||||
});
|
|
@ -0,0 +1,37 @@
|
|||
import { load } from 'cheerio';
|
||||
|
||||
import { flixHqBase } from '@/providers/sources/flixhq/common';
|
||||
import { ScrapeContext } from '@/utils/context';
|
||||
|
||||
export async function getFlixhqSources(ctx: ScrapeContext, id: string) {
|
||||
const type = id.split('/')[0];
|
||||
const episodeParts = id.split('-');
|
||||
const episodeId = episodeParts[episodeParts.length - 1];
|
||||
|
||||
const data = await ctx.proxiedFetcher<string>(`/ajax/${type}/episodes/${episodeId}`, {
|
||||
baseUrl: flixHqBase,
|
||||
});
|
||||
const doc = load(data);
|
||||
const sourceLinks = doc('.nav-item > a')
|
||||
.toArray()
|
||||
.map((el) => {
|
||||
const query = doc(el);
|
||||
const embedTitle = query.attr('title');
|
||||
const linkId = query.attr('data-linkid');
|
||||
if (!embedTitle || !linkId) throw new Error('invalid sources');
|
||||
return {
|
||||
embed: embedTitle,
|
||||
episodeId: linkId,
|
||||
};
|
||||
});
|
||||
|
||||
return sourceLinks;
|
||||
}
|
||||
|
||||
export async function getFlixhqSourceDetails(ctx: ScrapeContext, sourceId: string): Promise<string> {
|
||||
const jsonData = await ctx.proxiedFetcher<Record<string, any>>(`/ajax/sources/${sourceId}`, {
|
||||
baseUrl: flixHqBase,
|
||||
});
|
||||
|
||||
return jsonData.link;
|
||||
}
|
|
@ -0,0 +1,34 @@
|
|||
import { load } from 'cheerio';
|
||||
|
||||
import { MovieMedia } from '@/main/media';
|
||||
import { flixHqBase } from '@/providers/sources/flixhq/common';
|
||||
import { compareMedia } from '@/utils/compare';
|
||||
import { ScrapeContext } from '@/utils/context';
|
||||
|
||||
export async function getFlixhqId(ctx: ScrapeContext, media: MovieMedia): Promise<string | null> {
|
||||
const searchResults = await ctx.proxiedFetcher<string>(`/search/${media.title.replaceAll(/[^a-z0-9A-Z]/g, '-')}`, {
|
||||
baseUrl: flixHqBase,
|
||||
});
|
||||
|
||||
const doc = load(searchResults);
|
||||
const items = doc('.film_list-wrap > div.flw-item')
|
||||
.toArray()
|
||||
.map((el) => {
|
||||
const query = doc(el);
|
||||
const id = query.find('div.film-poster > a').attr('href')?.slice(1);
|
||||
const title = query.find('div.film-detail > h2 > a').attr('title');
|
||||
const year = query.find('div.film-detail > div.fd-infor > span:nth-child(1)').text();
|
||||
|
||||
if (!id || !title || !year) return null;
|
||||
return {
|
||||
id,
|
||||
title,
|
||||
year: +year,
|
||||
};
|
||||
});
|
||||
|
||||
const matchingItem = items.find((v) => v && compareMedia(media, v.title, v.year));
|
||||
|
||||
if (!matchingItem) return null;
|
||||
return matchingItem.id;
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
import { CommonMedia } from '@/main/media';
|
||||
|
||||
export function normalizeTitle(title: string): string {
|
||||
return title
|
||||
.trim()
|
||||
.toLowerCase()
|
||||
.replace(/['":]/g, '')
|
||||
.replace(/[^a-zA-Z0-9]+/g, '_');
|
||||
}
|
||||
|
||||
export function compareTitle(a: string, b: string): boolean {
|
||||
return normalizeTitle(a) === normalizeTitle(b);
|
||||
}
|
||||
|
||||
export function compareMedia(media: CommonMedia, title: string, releaseYear?: number): boolean {
|
||||
// if no year is provided, count as if its the correct year
|
||||
const isSameYear = releaseYear === undefined ? true : media.releaseYear === releaseYear;
|
||||
return compareTitle(media.title, title) && isSameYear;
|
||||
}
|
|
@ -1,8 +1,8 @@
|
|||
import { UseableFetcher } from '@/fetchers/types';
|
||||
|
||||
export type ScrapeContext = {
|
||||
proxiedFetcher: UseableFetcher;
|
||||
fetcher: UseableFetcher;
|
||||
proxiedFetcher: <T>(...params: Parameters<UseableFetcher<T>>) => ReturnType<UseableFetcher<T>>;
|
||||
fetcher: <T>(...params: Parameters<UseableFetcher<T>>) => ReturnType<UseableFetcher<T>>;
|
||||
progress(val: number): void;
|
||||
};
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "es2018",
|
||||
"lib": ["es2018", "DOM"],
|
||||
"target": "es2021",
|
||||
"lib": ["es2021"],
|
||||
"module": "commonjs",
|
||||
"declaration": true,
|
||||
"outDir": "./lib",
|
||||
|
@ -9,8 +9,10 @@
|
|||
"baseUrl": "src",
|
||||
"experimentalDecorators": true,
|
||||
"isolatedModules": false,
|
||||
"types": ["vitest/globals"],
|
||||
"paths": {
|
||||
"@/*": ["./*"]
|
||||
"@/*": ["./*"],
|
||||
"@entrypoint": ["./index.ts"]
|
||||
}
|
||||
},
|
||||
"include": ["src"],
|
||||
|
|
|
@ -27,4 +27,7 @@ module.exports = defineConfig({
|
|||
fileName: 'providers',
|
||||
},
|
||||
},
|
||||
test: {
|
||||
globals: true,
|
||||
},
|
||||
});
|
||||
|
|
Loading…
Reference in New Issue