Commit 5ef1a7d: initialize

ZeroneDoo committed Sep 2, 2023 (0 parents)
Showing 21 changed files with 4,963 additions and 0 deletions.
6 changes: 6 additions & 0 deletions .env
@@ -0,0 +1,6 @@
PORT=3000
KOMIKU_URL=https://komiku.com
DRAKORKITA_URL=https://drakorkita.fun
NANIMEX_URL=https://nanimex2.com
KURAMANIME_URL=https://kuramanime.pro
WIBUDESU_URL=https://wibudesu.co
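
These values are read from process.env once dotenv.config() runs in server.js below. As a quick illustration (a sketch, not part of the commit), one of them could be read with a fallback so the scraper still has a target when the variable is unset:

    // sketch only: the fallback mirrors the value above and is not taken from the repo's code
    require("dotenv").config()

    const komikuUrl = process.env.KOMIKU_URL || "https://komiku.com"
    console.log(`scraping target: ${komikuUrl}`)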
1 change: 1 addition & 0 deletions .gitignore
@@ -0,0 +1 @@
node_modules
7 changes: 7 additions & 0 deletions index.js
@@ -0,0 +1,7 @@
const app = require('./server')

const port = process.env.PORT || 3000

app.listen(port, () => {
  console.log(`server running at http://localhost:${port}`)
})
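
Once this entry point is running (for example via the package.json scripts below), a quick smoke test can hit the root route defined in server.js further down. This is a sketch assuming the default port of 3000, using axios, which is already listed as a dependency:

    // smoke-test sketch, not part of the commit: assumes the server listens on port 3000
    const axios = require("axios")

    axios.get("http://localhost:3000/")
      .then((response) => console.log(response.data))
      .catch((error) => console.error(error.message))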
2,070 changes: 2,070 additions & 0 deletions package-lock.json

Large diffs are not rendered by default.

35 changes: 35 additions & 0 deletions package.json
@@ -0,0 +1,35 @@
{
  "name": "komiku-scrapper",
  "version": "1.0.0",
  "description": "Unofficial Komik, Drakor and Anime scraper APIs",
  "main": "index.js",
  "scripts": {
    "start": "node index.js",
    "dev": "nodemon index.js"
  },
  "keywords": [
    "scraper api",
    "manga api",
    "komik api",
    "drakor api",
    "movie api",
    "anime api",
    "manhwa api",
    "manhua api",
    "anime scraper"
  ],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "axios": "^1.4.0",
    "cheerio": "^1.0.0-rc.12",
    "cors": "^2.8.5",
    "dotenv": "^16.3.1",
    "express": "^4.18.2",
    "jsdom": "^22.1.0",
    "nodemon": "^3.0.1",
    "puppeteer": "^21.1.0",
    "puppeteer-core": "^21.1.0"
  }
}
45 changes: 45 additions & 0 deletions server.js
@@ -0,0 +1,45 @@
const dotenv = require("dotenv")

dotenv.config()

const express = require("express")
const cors = require("cors")
const drakorkita = require("./src/routes/drakorkita")
const komiku = require("./src/routes/komiku")
const nanimex = require("./src/routes/nanimex")
const kuramanime = require("./src/routes/kuramanime")
const wibudesu = require("./src/routes/wibudesu")

const app = express()

app.use(express.urlencoded({extended: false}))
app.use(express.json())
app.use(cors())

app.use("/drakorkita", drakorkita)
app.use("/komiku", komiku)
app.use("/nanimex", nanimex)
app.use("/kuramanime", kuramanime)
app.use("/wibudesu", wibudesu)
app.get("*", (req, res) => {
res.status(404).json({
message: "404 route not found"
})
})

app.get("/", (req, res) => {
res.status(200).json({
message: "Unofficial Komik, Drakor and Anime APIs",
developed_by: "ZeroneDoo",
github: "https://github.com/ZeroneDoo",
data: {
komiku_url: process.env.KOMIKU_URL,
drakorkita_url: process.env.DRAKORKITA_URL,
kuramanime_url: process.env.KURAMANIME_URL,
nanimex_url: process.env.NANIMEX_URL,
wibudesu_url: process.env.WIBUDESU_URL,
}
})
})

module.exports = app
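
The route modules required above (src/routes/drakorkita.js and the others) are part of the 21 files in this commit but are not rendered in this excerpt. A minimal sketch of what such a module presumably looks like is below: the controller names come from src/controllers/drakorkita.js shown next, while the URL paths are assumptions, not taken from the commit.

    // hypothetical sketch of src/routes/drakorkita.js: controller names match the
    // controller file below, but the paths here are assumed, not from the commit
    const express = require("express")
    const router = express.Router()

    const {
      seriesAll,
      movieAll,
      searchAll,
      detailAllType,
    } = require("../controllers/drakorkita")

    router.get("/series", seriesAll)
    router.get("/movies", movieAll)
    router.get("/search", searchAll)
    router.get("/detail/:endpoint", detailAllType)

    module.exports = router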
233 changes: 233 additions & 0 deletions src/controllers/drakorkita.js
@@ -0,0 +1,233 @@
// Controllers for the drakorkita endpoints: fetch the target page with axios,
// then hand the response to the matching scraper for parsing
const axios = require("axios")
const {
  scrapeSeries,
  scrapeSeriesUpdated,
  scrapeMovie,
  scrapeNewMovie,
  scrapeOngoingSeries,
  scrapeCompletedSeries,
  scrapeGenres,
  scrapeDetailGenres,
  scrapeSearch,
  scrapeDetailAllType,
} = require('../scrapers/drakorkita')

const seriesAll = async (req, res) => {
  try {
    const { page = 1 } = req.query
    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}/all?media_type=tv&page=${page}`)

    const datas = await scrapeSeries(req, axiosRequest)

    res.status(200).json({
      message: "success",
      page: parseInt(page),
      ...datas
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

const seriesUpdated = async (req, res) => {
  try {
    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}`)

    const datas = await scrapeSeriesUpdated(req, axiosRequest)

    res.status(200).json({
      message: "success",
      datas
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

const movieAll = async (req, res) => {
  try {
    const { page = 1 } = req.query
    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}/all?media_type=movie&page=${page}`)

    const datas = await scrapeMovie(req, axiosRequest)

    res.status(200).json({
      message: "success",
      page: parseInt(page),
      ...datas
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

const newMovie = async (req, res) => {
  try {
    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}`)

    const datas = await scrapeNewMovie(req, axiosRequest)

    res.status(200).json({
      message: "success",
      datas
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

const ongoingSeries = async (req, res) => {
  try {
    const { page = 1 } = req.query
    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}/all?status=returning&page=${page}`)

    const datas = await scrapeOngoingSeries(req, axiosRequest)

    res.status(200).json({
      message: "success",
      page: parseInt(page),
      ...datas
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

const completedSeries = async (req, res) => {
  try {
    const { page = 1 } = req.query
    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}/all?status=ended&page=${page}`)

    const datas = await scrapeCompletedSeries(req, axiosRequest)

    res.status(200).json({
      message: "success",
      page: parseInt(page),
      ...datas
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

const genres = async (req, res) => {
  try {
    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}`)

    const datas = await scrapeGenres(req, axiosRequest)

    res.status(200).json({
      message: "success",
      datas
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

const detailGenres = async (req, res) => {
  try {
    const { page = 1 } = req.query
    const { endpoint } = req.params
    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}/all?genre=${endpoint}&page=${page}`)

    const datas = await scrapeDetailGenres({ page, endpoint }, axiosRequest)

    res.status(200).json({
      message: "success",
      page: parseInt(page),
      ...datas
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

const searchAll = async (req, res) => {
  try {
    const { s, page = 1 } = req.query
    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}/all?q=${encodeURIComponent(s)}&page=${page}`)

    const datas = await scrapeSearch(req, axiosRequest)

    res.status(200).json({
      message: "success",
      page: parseInt(page),
      keyword: s,
      ...datas
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

const detailAllType = async (req, res) => {
  try {
    const { endpoint } = req.params

    const axiosRequest = await axios.get(`${process.env.DRAKORKITA_URL}/detail/${endpoint}`)

    const data = await scrapeDetailAllType({ endpoint }, axiosRequest)

    res.status(200).json({
      message: "success",
      data
    })
  } catch (e) {
    console.error(e)

    res.status(500).json({
      message: `${e}`
    })
  }
}

module.exports = {
  seriesAll,
  seriesUpdated,
  movieAll,
  newMovie,
  ongoingSeries,
  completedSeries,
  genres,
  detailGenres,
  searchAll,
  detailAllType,
}
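
The scraper functions imported from ../scrapers/drakorkita are likewise part of the commit but not shown here. The sketch below only illustrates the general shape such a function could take with cheerio (already a dependency): the selectors and field names are invented placeholders, not the commit's actual ones.

    // illustrative sketch only: selectors and output fields are placeholders,
    // not taken from the real src/scrapers/drakorkita.js
    const cheerio = require("cheerio")

    const scrapeSeries = async (req, axiosRequest) => {
      const $ = cheerio.load(axiosRequest.data)
      const series = []

      // walk whatever element wraps each series card and collect basic metadata
      $(".item").each((_, el) => {
        series.push({
          title: $(el).find(".title").text().trim(),
          endpoint: $(el).find("a").attr("href"),
          thumbnail: $(el).find("img").attr("src"),
        })
      })

      return { series }
    }

    module.exports = { scrapeSeries }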