about summary refs log tree commit diff
path: root/pages/api/v2/etc
diff options
context:
space:
mode:
author    Factiven <[email protected]>    2023-12-24 13:03:54 +0700
committer Factiven <[email protected]>    2023-12-24 13:03:54 +0700
commit   50a0f0240d7fef133eb5acc1bea2b1168b08e9db (patch)
tree     307e09e505580415a58d64b5fc3580e9235869f1 /pages/api/v2/etc
parent   Update README.md (#104) (diff)
download moopa-50a0f0240d7fef133eb5acc1bea2b1168b08e9db.tar.xz
         moopa-50a0f0240d7fef133eb5acc1bea2b1168b08e9db.zip
migrate to typescript
Diffstat (limited to 'pages/api/v2/etc')
-rw-r--r-- pages/api/v2/etc/recent/[page].js                                            | 57
-rw-r--r-- pages/api/v2/etc/recent/[page].tsx                                           | 81
-rw-r--r-- pages/api/v2/etc/schedule/index.tsx (renamed from pages/api/v2/etc/schedule/index.js) | 35
3 files changed, 109 insertions, 64 deletions
diff --git a/pages/api/v2/etc/recent/[page].js b/pages/api/v2/etc/recent/[page].js
deleted file mode 100644
index 2ff22ea..0000000
--- a/pages/api/v2/etc/recent/[page].js
+++ /dev/null
@@ -1,57 +0,0 @@
-import { rateLimitStrict, redis } from "@/lib/redis";
-
-let API_URL;
-API_URL = process.env.API_URI || null;
-if (API_URL && API_URL.endsWith("/")) {
- API_URL = API_URL.slice(0, -1);
-}
-
-export default async function handler(req, res) {
- try {
- if (redis) {
- try {
- const ipAddress = req.socket.remoteAddress;
- await rateLimitStrict.consume(ipAddress);
- } catch (error) {
- return res.status(429).json({
- error: `Too Many Requests, retry after ${error.msBeforeNext / 1000}`,
- });
- }
- }
-
- let cache;
-
- if (redis) {
- cache = await redis.get(`recent-episode`);
- }
-
- if (cache) {
- return res.status(200).json({ results: JSON.parse(cache) });
- } else {
- const page = req.query.page || 1;
-
- var hasNextPage = true;
- var datas = [];
-
- async function fetchData(page) {
- const data = await fetch(
- `https://api.anify.tv/recent?type=anime&page=${page}&perPage=45`
- ).then((res) => res.json());
-
- // const filtered = data?.results?.filter((i) => i.type !== "ONA");
- // hasNextPage = data?.hasNextPage;
- datas = data;
- }
-
- await fetchData(page);
-
- if (redis) {
- await redis.set(`recent-episode`, JSON.stringify(datas), "EX", 60 * 60);
- }
-
- return res.status(200).json({ results: datas });
- }
- } catch (error) {
- res.status(500).json({ error });
- }
-}
diff --git a/pages/api/v2/etc/recent/[page].tsx b/pages/api/v2/etc/recent/[page].tsx
new file mode 100644
index 0000000..e49591c
--- /dev/null
+++ b/pages/api/v2/etc/recent/[page].tsx
@@ -0,0 +1,81 @@
+import { rateLimitStrict, redis } from "@/lib/redis";
+import { AnifyRecentEpisode } from "@/utils/types";
+import axios from "axios";
+import { NextApiRequest, NextApiResponse } from "next";
+
+let API_URL: string | null;
+API_URL = process.env.API_URI || null;
+if (API_URL && API_URL.endsWith("/")) {
+ API_URL = API_URL.slice(0, -1);
+}
+
+export default async function handler(
+ req: NextApiRequest,
+ res: NextApiResponse
+) {
+ try {
+ if (redis) {
+ try {
+ const ipAddress: any = req.socket.remoteAddress;
+ await rateLimitStrict?.consume(ipAddress);
+ } catch (error: any) {
+ return res.status(429).json({
+ error: `Too Many Requests, retry after ${error.msBeforeNext / 1000}`,
+ });
+ }
+ }
+
+ let cache;
+
+ if (redis) {
+ cache = await redis.get(`recent-episode`);
+ }
+
+ if (cache) {
+ return res.status(200).json({ results: JSON.parse(cache) });
+ } else {
+ const page = req.query.page || 1;
+
+ var hasNextPage = true;
+ let datas: AnifyRecentEpisode[] = [];
+
+ const fetchData = async (page: any) => {
+ const { data } = await axios.get(
+ `https://api.anify.tv/recent?type=anime&page=${page}&perPage=45&fields=[id,slug,title,currentEpisode,coverImage,episodes]`
+ );
+
+ // const filtered = data?.results?.filter((i) => i.type !== "ONA");
+ // hasNextPage = data?.hasNextPage;
+
+ const newData = data.map((i: AnifyRecentEpisode) => {
+ const getGogo = i.episodes?.data?.find(
+ (x) => x.providerId === "gogoanime"
+ );
+ const getGogoEpisode = getGogo?.episodes?.find(
+ (x) => x.number === i.currentEpisode
+ );
+
+ return {
+ id: i.id,
+ slug: getGogoEpisode?.id,
+ title: i.title,
+ currentEpisode: i.currentEpisode,
+ coverImage: i.coverImage,
+ };
+ });
+
+ datas = newData;
+ };
+
+ await fetchData(page);
+
+ if (redis) {
+ await redis.set(`recent-episode`, JSON.stringify(datas), "EX", 60 * 60);
+ }
+
+ return res.status(200).json({ results: datas });
+ }
+ } catch (error) {
+ res.status(500).json({ error });
+ }
+}
diff --git a/pages/api/v2/etc/schedule/index.js b/pages/api/v2/etc/schedule/index.tsx
index 2ddc82a..e6f0b26 100644
--- a/pages/api/v2/etc/schedule/index.js
+++ b/pages/api/v2/etc/schedule/index.tsx
@@ -1,6 +1,7 @@
import axios from "axios";
import cron from "cron";
import { rateLimiterRedis, redis } from "@/lib/redis";
+import { NextApiRequest, NextApiResponse } from "next";
// Function to fetch new data
async function fetchData() {
@@ -37,22 +38,42 @@ const job = new cron.CronJob("0 0 * * 1", () => {
});
job.start();
-export default async function handler(req, res) {
+interface Title {
+ romaji: string;
+ english: string;
+ native: string;
+}
+
+type CachedData = {
+ id: string;
+ title: Title;
+ coverImage: string;
+ bannerImage: string;
+ airingAt: number;
+ airingEpisode: number;
+};
+
+export default async function handler(
+ req: NextApiRequest,
+ res: NextApiResponse
+) {
try {
- let cached;
+ let cached: CachedData | null = null;
if (redis) {
try {
- const ipAddress = req.socket.remoteAddress;
- await rateLimiterRedis.consume(ipAddress);
- } catch (error) {
+ const ipAddress: any = req.socket.remoteAddress;
+ await rateLimiterRedis?.consume(ipAddress);
+ } catch (error: any) {
return res.status(429).json({
error: `Too Many Requests, retry after ${error.msBeforeNext / 1000}`,
});
}
- cached = await redis.get("schedule");
+ const cachedData = await redis.get("schedule");
+ cached = cachedData ? JSON.parse(cachedData) : null;
}
+
if (cached) {
- return res.status(200).json(JSON.parse(cached));
+ return res.status(200).json(cached);
} else {
const data = await fetchData();