
🚀 Serve Frontend with FastAPI #15


Draft pull request: wants to merge 3 commits into base: develop

38 changes: 24 additions & 14 deletions fastcrawler_ui/core/fastapi/app.py
@@ -1,23 +1,33 @@
+from pathlib import Path
+
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.staticfiles import StaticFiles
 
 from .routers import get_routers
 
 
-def get_application(router=get_routers(), base_app=FastAPI) -> FastAPI:
-    app = base_app()
-    origins = ["*"]
-    app.add_middleware(
-        CORSMiddleware,
-        allow_origins=origins,
-        allow_credentials=True,
-        allow_methods=["*"],
-        allow_headers=["*"],
-    )
-    app.include_router(router)
-    app.mount("/", StaticFiles(directory="frontend/dist", html=True), name="static")
-    return app
+class ApplicationFactory:
+    def __init__(self, router=get_routers(), base_app=FastAPI, dist_frontend=None):
+        self.router = router
+        self.base_app = base_app
+        self.dist_frontend = (
+            dist_frontend or Path(__file__).parent.parent.parent.parent / "frontend" / "dist"
+        )
+
+    def create_application(self) -> FastAPI:
+        app = self.base_app()
+        app.add_middleware(
+            CORSMiddleware,
+            allow_origins=["*"],
+            allow_credentials=True,
+            allow_methods=["*"],
+            allow_headers=["*"],
+        )
+        app.include_router(self.router)
+        app.mount("/", StaticFiles(directory=self.dist_frontend, html=True), name="static")
+        return app
 
 
-app = get_application()
+def create_app(factory=ApplicationFactory()) -> FastAPI:
+    return factory.create_application()
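
Note on the factory: the catch-all StaticFiles mount at "/" is added after include_router, so the API routes are matched before the frontend files. A minimal usage sketch (not part of this PR, and assuming the frontend build exists at frontend/dist and that the app is served on port 8001, the origin the dashboard pages fetch from):

# Hypothetical usage sketch: build the app with an explicit frontend
# build directory and serve it with uvicorn.
from pathlib import Path

import uvicorn

from fastcrawler_ui.core.fastapi.app import ApplicationFactory

# Any directory containing an index.html works, because
# StaticFiles(html=True) serves that file at "/".
factory = ApplicationFactory(dist_frontend=Path("frontend") / "dist")
app = factory.create_application()

if __name__ == "__main__":
    # 8001 matches the origin hard-coded in the dashboard pages.
    uvicorn.run(app, host="127.0.0.1", port=8001)
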
4 changes: 3 additions & 1 deletion fastcrawler_ui/run.py
@@ -7,10 +7,12 @@
 
 from fastcrawler import FastCrawler
 
-from fastcrawler_ui.core.fastapi.app import app
+from fastcrawler_ui.core.fastapi.app import create_app
 from fastcrawler_ui.core.fastapi.server import UvicornConfig, UvicornServer
 from fastcrawler_ui.core.fastapi.sync import sync_crawler_to_fastapi
 
+app = create_app()
+
 
 async def run_async(
     crawler: FastCrawler,
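
Because app is now created at module level in run.py, other tooling can import it directly. A hypothetical smoke test (not part of this PR, assuming frontend/dist has been built so the StaticFiles mount can resolve index.html):

# Hypothetical smoke test: the root path should return the built index.html.
from fastapi.testclient import TestClient

from fastcrawler_ui.run import app


def test_index_is_served() -> None:
    client = TestClient(app)
    response = client.get("/")
    assert response.status_code == 200
    assert "text/html" in response.headers["content-type"]
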

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion frontend/dist/index.html
@@ -5,7 +5,7 @@
     <link rel="icon" type="image/svg+xml" href="/logo.svg" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>Fast Crawler Panel</title>
-    <script type="module" crossorigin src="/assets/index-25e06ec3.js"></script>
+    <script type="module" crossorigin src="/assets/index-0f5c240a.js"></script>
     <link rel="stylesheet" href="/assets/index-ae84fa14.css">
   </head>
   <body>
196 changes: 89 additions & 107 deletions frontend/src/pages/index.tsx
@@ -1,11 +1,12 @@
 import { useEffect, useState } from "react";
+import { useNavigate } from "react-router-dom";
 
 import BaseFrame from "../components/Base/Frame";
 import BaseChart from "../components/Base/Chart";
 import { IOverviewData } from "../constants/types";
 
 function index() {
-  const [requests, setRequest] = useState({
+  const [requests, setRequests] = useState({
     labels: ["", "", "", "", "", ""],
     data: [0, 0, 0, 0, 0, 0],
     data1: [0, 0, 0, 0, 0, 0],
@@ -22,117 +23,98 @@ function index() {
     failedRequests: 0,
   });
 
+  const navigate = useNavigate();
+
   useEffect(() => {
-    fetchOverviewData();
-    fetchChartData();
-  }, []);
-
-  const fetchOverviewData = () => {
-    fetch("http://127.0.0.1:8001/dashboard/crawlers")
-      .then((response) => {
-        const stream = response.body;
-        const reader = stream!.getReader();
-        const readChunk = () => {
-          reader
-            .read()
-            .then(({ value, done }) => {
-              if (done) {
-                console.log("Stream finished");
-                return;
-              }
-              const chunkString = new TextDecoder().decode(value);
-              try {
-                const resData = JSON.parse(chunkString);
-                let time = new Date(resData.data.time).toLocaleString();
-                setOverviewData((prevData) => ({
-                  ...prevData,
-                  currentTime: time,
-                  allCrawlers: resData.data.all_crawlers,
-                  activeCrawlers: resData.data.active_crawlers,
-                  deactiveCrawlers: resData.data.deactive_crawlers,
-                }));
-              } catch (error) {
-                console.log(error);
-              }
-
-              readChunk();
-            })
-            .catch((error) => {
-              console.error(error);
-            });
-        };
-        readChunk();
-      })
-      .catch((error) => {
-        console.error(error);
-      });
-  };
-
-  const fetchChartData = () => {
-    fetch("http://127.0.0.1:8001/dashboard/chart")
-      .then((response) => {
-        const stream = response.body;
-        const reader = stream!.getReader();
-        const readChunk = () => {
-          reader
-            .read()
-            .then(({ value, done }) => {
-              if (done) {
-                console.log("Stream finished");
-                return;
-              }
-              const chunkString = new TextDecoder().decode(value);
-              try {
-                const resData = JSON.parse(chunkString);
-                let time = resData.data.time.split("T")[1];
-                let second = Math.floor(+time.split(":")[2]);
-                time =
-                  time.split(":")[0] + ":" + time.split(":")[1] + ":" + second;
-                //@ts-ignore
-                setRequest((prevData) => {
-                  const newData = [...prevData.data, resData.data.all_requests];
-                  const newData1 = [
-                    ...prevData.data1,
-                    resData.data.successful_requests,
-                  ];
-                  const newData2 = [
-                    ...prevData.data2,
-                    resData.data.failed_requests,
-                  ];
-                  const newLabels = [...prevData.labels, time];
-                  newData.splice(0, 1);
-                  newData1.splice(0, 1);
-                  newData2.splice(0, 1);
-                  newLabels.splice(0, 1);
-                  return {
-                    data: newData,
-                    data1: newData1,
-                    data2: newData2,
-                    labels: newLabels,
-                  };
-                });
-                setOverviewData((prevData) => ({
-                  ...prevData,
-                  totalRequests: resData.data.all_requests,
-                  successfullRequests: resData.data.successful_requests,
-                  failedRequests: resData.data.failed_requests,
-                }));
-              } catch (error) {
-                console.log(error);
-              }
-
-              readChunk();
-            })
-            .catch((error) => {
-              console.error(error);
-            });
-        };
-        readChunk();
-      })
-      .catch((error) => {
-        console.error(error);
-      });
-  };
+    const abortController = new AbortController();
+    const signal = abortController.signal;
+
+    const fetchOverviewData = async () => {
+      try {
+        const response = await fetch(
+          "http://127.0.0.1:8001/dashboard/crawlers",
+          { signal }
+        );
+        const reader = response.body!.getReader();
+        while (true) {
+          const { done, value } = await reader.read();
+          if (done) {
+            console.log("Stream finished");
+            break;
+          }
+          const chunkString = new TextDecoder().decode(value);
+          const resData = JSON.parse(chunkString);
+          const time = new Date(resData.data.time).toLocaleString();
+          setOverviewData((prevData) => ({
+            ...prevData,
+            currentTime: time,
+            allCrawlers: resData.data.all_crawlers,
+            activeCrawlers: resData.data.active_crawlers,
+            deactiveCrawlers: resData.data.deactive_crawlers,
+          }));
+        }
+      } catch (error) {
+        console.warn("Warning in fetching overview data:", error);
+      }
+    };
+
+    const fetchChartData = async () => {
+      try {
+        const response = await fetch("http://127.0.0.1:8001/dashboard/chart", {
+          signal,
+        });
+        const reader = response.body!.getReader();
+        while (true) {
+          const { done, value } = await reader.read();
+          if (done) {
+            console.log("Stream finished");
+            break;
+          }
+          const chunkString = new TextDecoder().decode(value);
+          const resData = JSON.parse(chunkString);
+          let time = resData.data.time.split("T")[1];
+          let second = Math.floor(+time.split(":")[2]);
+          time = time.split(":")[0] + ":" + time.split(":")[1] + ":" + second;
+
+          setRequests((prevData) => {
+            const newData = [...prevData.data, resData.data.all_requests];
+            const newData1 = [
+              ...prevData.data1,
+              resData.data.successful_requests,
+            ];
+            const newData2 = [...prevData.data2, resData.data.failed_requests];
+            const newLabels = [...prevData.labels, time];
+            newData.splice(0, 1);
+            newData1.splice(0, 1);
+            newData2.splice(0, 1);
+            newLabels.splice(0, 1);
+            return {
+              data: newData,
+              data1: newData1,
+              data2: newData2,
+              labels: newLabels,
+            };
+          });
+
+          setOverviewData((prevData) => ({
+            ...prevData,
+            totalRequests: resData.data.all_requests,
+            successfulRequests: resData.data.successful_requests,
+            failedRequests: resData.data.failed_requests,
+          }));
+        }
+      } catch (error) {
+        console.warn("Warning in fetching chart data:", error);
+      }
+    };
+
+    fetchOverviewData();
+    fetchChartData();
+
+    return () => {
+      abortController.abort();
+    };
+  }, [navigate]);
 
   return (
     <div id="index">
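
The rewritten page reads the dashboard endpoints as a chunked stream and calls JSON.parse on every chunk, so each chunk has to arrive as a self-contained JSON document. The routers themselves are not part of this diff; purely as an illustration of the shape the client expects, a hypothetical FastAPI endpoint could stream such chunks like this (field names taken from the frontend code above, values are placeholders):

# Hypothetical server-side sketch: emit one JSON document per chunk,
# matching what the dashboard page parses from the stream.
import asyncio
import json
from datetime import datetime

from fastapi import APIRouter
from fastapi.responses import StreamingResponse

router = APIRouter()


async def chart_ticks():
    while True:
        payload = {
            "data": {
                "time": datetime.utcnow().isoformat(),
                "all_requests": 0,  # placeholder values
                "successful_requests": 0,
                "failed_requests": 0,
            }
        }
        yield json.dumps(payload)
        await asyncio.sleep(1)


@router.get("/dashboard/chart")
async def dashboard_chart() -> StreamingResponse:
    return StreamingResponse(chart_ticks(), media_type="application/json")
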
29 changes: 21 additions & 8 deletions frontend/src/pages/spiders/details.tsx
@@ -1,5 +1,5 @@
 import { useEffect, useState } from "react";
-import { useLocation } from "react-router-dom";
+import { useLocation, useNavigate } from "react-router-dom";
 import Icon from "@mdi/react";
 import { mdiInformationOutline } from "@mdi/js";
 
@@ -57,14 +57,23 @@ function SpiderDetailsPage() {
     WARNING: "var(--color-warning)",
   };
 
+  const navigate = useNavigate();
+  const abortController = new AbortController();
+
   useEffect(() => {
     fetchCrawlerDetails();
     fetchChartData();
     fetchLogsData();
-  }, []);
+
+    return () => {
+      abortController.abort();
+    };
+  }, [navigate]);
 
   const fetchChartData = () => {
-    fetch(`http://127.0.0.1:8001/${state.data.id}/chart`)
+    fetch(`http://127.0.0.1:8001/${state.data.id}/chart`, {
+      signal: abortController.signal,
+    })
       .then((response) => {
         const stream = response.body;
         const reader = stream!.getReader();
@@ -120,12 +129,14 @@ function SpiderDetailsPage() {
         readChunk();
       })
      .catch((error) => {
-        console.error(error);
+        console.warn(error);
       });
   };
 
   const fetchCrawlerDetails = () => {
-    fetch(`http://127.0.0.1:8001/${state.data.id}/detail`)
+    fetch(`http://127.0.0.1:8001/${state.data.id}/detail`, {
+      signal: abortController.signal,
+    })
       .then((response) => {
         const stream = response.body;
         const reader = stream!.getReader();
@@ -160,12 +171,14 @@ function SpiderDetailsPage() {
         readChunk();
       })
      .catch((error) => {
-        console.error(error);
+        console.warn(error);
       });
   };
 
   const fetchLogsData = () => {
-    fetch(`http://127.0.0.1:8001/${state.data.id}/logs`)
+    fetch(`http://127.0.0.1:8001/${state.data.id}/logs`, {
+      signal: abortController.signal,
+    })
       .then((response) => {
         const stream = response.body;
         const reader = stream!.getReader();
@@ -203,7 +216,7 @@ function SpiderDetailsPage() {
         readChunk();
       })
      .catch((error) => {
-        console.error(error);
+        console.warn(error);
       });
   };
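
With the detail page now aborting its in-flight fetches on unmount, the server side should also stop producing chunks once the client goes away. The routers are outside this diff, but as a hypothetical sketch, a FastAPI/Starlette streaming endpoint can poll request.is_disconnected() and end the stream after AbortController.abort() fires:

# Hypothetical sketch: stop streaming log chunks once the client has
# aborted the request. Path shape mirrors the frontend fetch above.
import asyncio
import json

from fastapi import APIRouter, Request
from fastapi.responses import StreamingResponse

router = APIRouter()


@router.get("/{crawler_id}/logs")
async def crawler_logs(crawler_id: str, request: Request) -> StreamingResponse:
    async def log_stream():
        while not await request.is_disconnected():
            # Placeholder payload; the real log source lives in the crawler.
            yield json.dumps({"data": {"id": crawler_id, "logs": []}})
            await asyncio.sleep(1)

    return StreamingResponse(log_stream(), media_type="application/json")
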