+
+
+
+
+ )
+}
\ No newline at end of file
diff --git a/FE/src/components/module/QueryOutput.jsx b/FE/src/components/module/QueryOutput.jsx
new file mode 100644
index 0000000..c9407d4
--- /dev/null
+++ b/FE/src/components/module/QueryOutput.jsx
@@ -0,0 +1,67 @@
+import React, { useState, useEffect } from 'react';
+
+import { styled, Box } from '@mui/system'
+import CustomContainer from '../atom/CustomContainer';
+import CustomText from '../atom/CustomText';
+import IconBox from '../atom/IconBox';
+
+import CopyIcon from '../../assets/icon/copy.png'
+import ReloadIcon from '../../assets/icon/reload.png'
+import LoadingIcon from '../../assets/icon/spinner.gif'
+
+export default function QueryOutput({ children, answer }) {
+ const [displayedText, setDisplayedText] = useState('');
+
+ function onClickCopy() {
+ navigator.clipboard.writeText(answer);
+ }
+
+ function onClickReload() {
+ window.location.reload();
+ }
+
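+  // Typewriter effect: reveal the answer one character every 20 ms.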
+  useEffect(() => {
+    setDisplayedText(''); // reset so a new answer does not append to the previous one
+    let index = 0;
+
+ const interval = setInterval(() => {
+ if (index < answer?.length) {
+ const char = answer[index];
+
+ setDisplayedText((prev) => prev + char);
+ index++;
+ } else {
+ clearInterval(interval);
+ }
+ }, 20);
+
+ return () => clearInterval(interval);
+ }, [answer]);
+
+  return (
+    <CustomContainer>
+      {/* The original JSX markup was stripped during extraction; this is a minimal
+          reconstruction from the surviving text and imports — wrapper structure,
+          styling, and props are assumptions. */}
+      <CustomText>{children}</CustomText>
+      {displayedText ? (
+        <Box>
+          <CustomText>{'💬 Answer'}</CustomText>
+        </Box>
+      ) : (
+        <Box>
+          <img src={LoadingIcon} alt="loading" />
+          <CustomText>hmm .. thinking</CustomText>
+        </Box>
+      )}
+      <CustomText>{displayedText}</CustomText>
+      <IconBox src={CopyIcon} onClick={onClickCopy} />
+      <IconBox src={ReloadIcon} onClick={onClickReload} />
+    </CustomContainer>
+  );
+}
diff --git a/FE/src/components/module/SelectModel.jsx b/FE/src/components/module/SelectModel.jsx
new file mode 100644
index 0000000..41c32ec
--- /dev/null
+++ b/FE/src/components/module/SelectModel.jsx
@@ -0,0 +1,24 @@
+import React from 'react';
+
+import { Box } from '@mui/material';
+
+export default function SelectModel({ onModelChange, selectedValue }) {
+ function handleChange(e) {
+ if (onModelChange) {
+ onModelChange(e.target.value);
+ }
+ }
+
+  return (
+    <Box>
+      {/* The original <select> markup was stripped during extraction; minimal
+          reconstruction — the option list is an assumption. */}
+      <select value={selectedValue} onChange={handleChange}>
+        <option value="GPT-4o-mini">GPT-4o-mini</option>
+      </select>
+    </Box>
+  );
+}
diff --git a/FE/src/components/module/StockInfoBox.jsx b/FE/src/components/module/StockInfoBox.jsx
new file mode 100644
index 0000000..09d2cb9
--- /dev/null
+++ b/FE/src/components/module/StockInfoBox.jsx
@@ -0,0 +1,13 @@
+import React from 'react'
+
+import { Box } from '@mui/material'
+import CustomText from '../atom/CustomText'
+
+export default function StockInfoBox({ text, value, color }) {
+  return (
+    <Box>
+      {/* Wrapper markup was stripped during extraction; minimal reconstruction. */}
+      <CustomText>{text}</CustomText>
+      <CustomText color={color}>{value}</CustomText>
+    </Box>
+  )
+}
\ No newline at end of file
diff --git a/FE/src/components/module/StockNewsWidget.jsx b/FE/src/components/module/StockNewsWidget.jsx
new file mode 100644
index 0000000..b027c50
--- /dev/null
+++ b/FE/src/components/module/StockNewsWidget.jsx
@@ -0,0 +1,58 @@
+import React, { useState, useEffect } from 'react';
+import axios from 'axios';
+import moment from 'moment';
+
+import CustomText from '../atom/CustomText';
+import CustomContainer from '../atom/CustomContainer';
+
+const URL = import.meta.env.VITE_NEWS_API_URL;
+const currentDate = moment().format('YYYY-MM-DD');
+const oneWeekAgoDate = moment().subtract(7, 'days').format('YYYY-MM-DD');
+
+export default function StockNewsWidget({ company }) {
+ const [newsData, setNewsData] = useState([]);
+ const [currentIndex, setCurrentIndex] = useState(0);
+
+ function getNewsSuccess(res) {
+ if (res.data.data) {
+ setNewsData(res.data.data);
+ }
+ }
+
+ function getNews() {
+ axios
+ .get(URL, {
+ params: {
+ keyword: `title:${company}`,
+ date_from: oneWeekAgoDate,
+ date_to: currentDate,
+ page_size: '30',
+ },
+ })
+      .then(getNewsSuccess)
+      .catch((err) => console.log('getNewsFail:', err));
+ }
+
+ useEffect(() => {
+ getNews();
+ }, [company]);
+
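+  // Rotate through the fetched headlines every 4 seconds.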
+ useEffect(() => {
+ if (newsData.length > 0) {
+ const interval = setInterval(() => {
+ setCurrentIndex((prevIndex) => (prevIndex + 1) % newsData.length);
+ }, 4000);
+
+ return () => clearInterval(interval);
+ }
+ }, [newsData]);
+
+  return (
+    <CustomContainer>
+      {newsData.length > 0 ? (
+        <CustomText>
+          {'📢' + ' ' + newsData[currentIndex].title}
+        </CustomText>
+      ) : null}
+    </CustomContainer>
+  );
+}
diff --git a/FE/src/components/module/StockWidget.jsx b/FE/src/components/module/StockWidget.jsx
new file mode 100644
index 0000000..723d9f4
--- /dev/null
+++ b/FE/src/components/module/StockWidget.jsx
@@ -0,0 +1,104 @@
+import React, { useState, useEffect } from 'react'
+import axios from 'axios'
+import moment from 'moment'
+
+import { Box } from '@mui/system'
+import CustomText from '../atom/CustomText'
+import CustomContainer from '../atom/CustomContainer'
+import StockInfoBox from './StockInfoBox'
+import LoadingIcon from '../../assets/icon/spinner_widget.gif'
+
+const URL = 'https://apis.data.go.kr/1160100/service/GetStockSecuritiesInfoService/getStockPriceInfo';
+const apiKey = import.meta.env.VITE_STOCK_API_KEY;
+const currentDate = moment().format('YYYYMMDD');
+
+export default function StockWidget({ company }) {
+ const [stockData, setStockData] = useState();
+
+  function getStockSuccess(res) {
+    const data = res.data.response.body.items.item?.[0];
+
+    if (!data) { // data is a single object, so a `length` check never fired
+      console.error('No data found in API response');
+      return;
+    }
+
+    setStockData({
+      itmsNm: data.itmsNm, // stock name
+      basDt: data.basDt, // base date
+      srtnCd: data.srtnCd, // ticker code
+      mrktCtg: data.mrktCtg, // market category
+      mkp: data.mkp, // opening price
+      clpr: data.clpr, // closing price
+      hipr: data.hipr, // high
+      lopr: data.lopr, // low
+      vs: data.vs, // change vs. previous day
+      fltRt: data.fltRt, // fluctuation rate
+      mrktTotAmt: data.mrktTotAmt, // market cap
+      trqu: data.trqu, // trading volume
+      yesterdayClpr: res.data.response.body.items.item[1]?.clpr, // previous day's close
+    });
+ }
+
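+  // numOfRows: 2 returns the two most recent trading days — item[0] is the
+  // latest session and item[1] supplies yesterdayClpr.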
+ function getStock() {
+ axios.get(URL, {
+ params: {
+ serviceKey: apiKey,
+ resultType: 'json',
+ endBasDt: currentDate,
+ likeItmsNm: company,
+ numOfRows: 2,
+ pageNo: 1,
+ }
+ })
+    .then(getStockSuccess)
+    .catch((err) => console.log('getStockFail:', err));
+ }
+
+ useEffect(() => {
+ getStock();
+ }, [company])
+
+  return (
+    <CustomContainer>
+      {stockData ? (
+        <Box>
+          {/* The original layout markup was stripped during extraction; this is a
+              minimal reconstruction from the surviving expressions — the Box
+              structure and the StockInfoBox rows are assumptions. */}
+          <CustomText>{stockData.itmsNm}</CustomText>
+          <CustomText>{stockData.srtnCd}</CustomText>
+          <CustomText>{stockData.mrktCtg}</CustomText>
+          <CustomText>{`${new Intl.NumberFormat().format(stockData.clpr)}`}</CustomText>
+          <CustomText>
+            {parseFloat(stockData.vs) < 0 ? `▼ ${new Intl.NumberFormat().format(stockData.vs.slice(1))}` : `▲ ${new Intl.NumberFormat().format(stockData.vs)}`}
+          </CustomText>
+          <CustomText>{`${stockData.fltRt}%`}</CustomText>
+          <CustomText>Details</CustomText>
+          {/* StockInfoBox rows (e.g. open/high/low/volume) were lost here */}
+          <CustomText>Stock Info</CustomText>
+        </Box>
+      ) : (
+        <Box>
+          <img src={LoadingIcon} alt="loading" />
+        </Box>
+      )}
+    </CustomContainer>
+  );
+}
\ No newline at end of file
diff --git a/FE/src/components/page/ChatPage.jsx b/FE/src/components/page/ChatPage.jsx
new file mode 100644
index 0000000..d3a06fe
--- /dev/null
+++ b/FE/src/components/page/ChatPage.jsx
@@ -0,0 +1,98 @@
+import React, { useState, useEffect, useRef } from 'react';
+import { useNavigate, useLocation } from 'react-router-dom';
+
+import { styled, Box } from '@mui/system';
+import SideBar from '../atom/SideBar';
+import IconBox from '../atom/IconBox';
+
+import StockWidget from '../module/StockWidget';
+import StockNewsWidget from '../module/StockNewsWidget';
+import QueryInput from '../module/QueryInput';
+import QueryOutput from '../module/QueryOutput';
+
+import HomeIcon from '../../assets/icon/home.png';
+import Logo from '../../assets/logo.png'
+
+import { requestQuery } from '../../api/query';
+
+export default function ChatPage() {
+ const navigate = useNavigate();
+ const location = useLocation();
+ const hasFetched = useRef(false);
+
+ const query = location.state?.query;
+ const model = location.state?.model || 'GPT-4o-mini';
+
+ const [sessionId, setSessionId] = useState('');
+ const [queries, setQueries] = useState([query]);
+ const [answers, setAnswers] = useState([]);
+ const [company, setCompany] = useState(location.state?.company);
+ const [chatHistory, setChatHistory] = useState([]);
+
+
+ const max_tokens = 1000;
+ const temperature = 0.7;
+
+ function onClickHome() {
+ navigate('/');
+ }
+
+ function handleQuerySubmit(newQuery) {
+ setQueries((prev) => [...prev, newQuery]);
+ requestApi(newQuery);
+ }
+
+ function handleCompanySubmit(newCompany) {
+ setCompany(newCompany);
+ }
+
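+  // Send the query with the rolling chat history; the backend returns the
+  // session id, the detected company, the answer, and the updated history.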
+ function requestApi(query) {
+ requestQuery(
+ sessionId,
+ query,
+      company === 'NAVER' ? '네이버' : company,
+ model,
+ max_tokens,
+ temperature,
+ chatHistory,
+ (res) => {
+ setSessionId(res.data.session_id);
+        if (res.data.company !== '') setCompany(res.data.company === '네이버' ? 'NAVER' : res.data.company); // adopt the backend-detected company, mapping 네이버 back to NAVER
+ setAnswers((prev) => [...prev, res.data.answer]);
+ setChatHistory(res.data.chat_history);
+ },
+ (err) => console.log('requestQueryFail:', err)
+ );
+ }
+
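+  // The ref guards the initial auto-query so it fires only once, even when
+  // React StrictMode mounts effects twice in development.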
+ useEffect(() => {
+ if (!hasFetched.current && query) {
+ hasFetched.current = true;
+ requestApi(query);
+ }
+ }, [query]);
+
+  return (
+    <Box>
+      {/* The original layout markup was stripped during extraction; this is a
+          minimal reconstruction from the surviving expressions — the wrapper
+          structure and the QueryInput prop names are assumptions. */}
+      <SideBar>
+        <IconBox src={HomeIcon} onClick={onClickHome} />
+        <img src={Logo} alt="logo" />
+        {company && <StockWidget company={company} />}
+        {company && <StockNewsWidget company={company} />}
+      </SideBar>
+      <Box>
+        {queries.map((q, idx) => (
+          <QueryOutput key={idx} answer={answers[idx]}>{q}</QueryOutput>
+        ))}
+        <QueryInput onQuerySubmit={handleQuerySubmit} onCompanySubmit={handleCompanySubmit} />
+      </Box>
+    </Box>
+  );
+}
\ No newline at end of file
diff --git a/FE/src/components/page/MainPage.jsx b/FE/src/components/page/MainPage.jsx
new file mode 100644
index 0000000..a53d154
--- /dev/null
+++ b/FE/src/components/page/MainPage.jsx
@@ -0,0 +1,70 @@
+import React, { useEffect, useState } from 'react';
+
+import { styled, Box } from '@mui/system'
+import CustomText from '../atom/CustomText';
+import SideBar from '../atom/SideBar'
+
+import IndexWidget from '../module/IndexWidget';
+import ExchangeRateWidget from '../module/ExchangeRateWidget';
+import NewsWidget from '../module/NewsWidget';
+import QueryInput from '../module/QueryInput';
+import SelectModel from '../module/SelectModel';
+
+import Logo from '../../assets/logo.png'
+// LoadingIcon is referenced in the upload effect below but was never imported;
+// the path assumes the same spinner asset QueryOutput uses.
+import LoadingIcon from '../../assets/icon/spinner.gif'
+
+export default function MainPage() {
+ const [model, setModel] = useState('');
+ const [message, setMessage] = useState('');
+ const [visibleMessage, setVisibleMessage] = useState('');
+ const [visibleIcon, setVisibleIcon] = useState('');
+
+ function handleChange(value) {
+ setModel(value);
+  }
+
+ function handleUpload(value) {
+ setMessage(value);
+ }
+
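+  // Mirror the upload status into the visible message; the "done" toast
+  // clears itself after 3 seconds.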
+ useEffect(() => {
+ if (message) {
+ setVisibleMessage(message);
+
+      if (message === 'PDF 받아라 ~') {
+ setVisibleIcon(LoadingIcon);
+ }
+      else if (message === 'PDF 전송 완료 !') {
+ setVisibleIcon('');
+ const timer = setTimeout(() => {
+ setVisibleMessage('');
+ }, 3000);
+
+ return () => clearTimeout(timer);
+ }
+
+ }
+ }, [message])
+
+  return (
+    <Box>
+      {/* The original layout markup was stripped during extraction; this is a
+          minimal reconstruction — wrapper structure and the QueryInput prop
+          names are assumptions. */}
+      <SideBar>
+        <IndexWidget />
+        <ExchangeRateWidget />
+        <NewsWidget />
+      </SideBar>
+      <Box>
+        <img src={Logo} alt="logo" />
+        <CustomText>
+          {'Search for the financial information you want ' + '🔍'}
+        </CustomText>
+        <SelectModel onModelChange={handleChange} selectedValue={model} />
+        <QueryInput model={model} onUpload={handleUpload} />
+        <Box>
+          {visibleIcon && <img src={visibleIcon} alt="loading" />}
+          <CustomText>{visibleMessage}</CustomText>
+        </Box>
+      </Box>
+    </Box>
+  );
+}
diff --git a/FE/src/configs/router.jsx b/FE/src/configs/router.jsx
new file mode 100644
index 0000000..e0e2cc4
--- /dev/null
+++ b/FE/src/configs/router.jsx
@@ -0,0 +1,14 @@
+import React from 'react';
+import { BrowserRouter, Routes, Route } from 'react-router-dom';
+
+import MainPage from '../components/page/MainPage';
+import ChatPage from '../components/page/ChatPage';
+
+export default function RouterConfiguration() {
+  return (
+    <Routes>
+      {/* markup reconstructed from the stripped diff; the chat route path is an assumption */}
+      <Route path="/" element={<MainPage />} />
+      <Route path="/chat" element={<ChatPage />} />
+    </Routes>
+  );
+}
\ No newline at end of file
diff --git a/FE/src/configs/theme.jsx b/FE/src/configs/theme.jsx
new file mode 100644
index 0000000..3a564cf
--- /dev/null
+++ b/FE/src/configs/theme.jsx
@@ -0,0 +1,9 @@
+import { createTheme } from '@mui/material/styles';
+
+const theme = createTheme({
+ typography: {
+ fontFamily: "'Pretendard-Regular', sans-serif",
+ },
+});
+
+export default theme;
\ No newline at end of file
diff --git a/FE/src/index.css b/FE/src/index.css
new file mode 100644
index 0000000..58bb9ee
--- /dev/null
+++ b/FE/src/index.css
@@ -0,0 +1,45 @@
+@font-face {
+ font-family: 'GmarketSansLight';
+ src: url('static/fonts/GmarketSansTTFLight.woff') format('woff');
+ font-weight: normal;
+ font-style: normal;
+}
+
+@font-face {
+ font-family: 'GmarketSansMedium';
+ src: url('static/fonts/GmarketSansTTFMedium.woff') format('woff');
+ font-weight: normal;
+ font-style: normal;
+}
+
+@font-face {
+ font-family: 'GmarketSansBold';
+ src: url('static/fonts/GmarketSansTTFBold.woff') format('woff');
+ font-weight: normal;
+ font-style: normal;
+}
+
+@font-face {
+ font-family: 'Pretendard-Light';
+ src: url('https://fastly.jsdelivr.net/gh/Project-Noonnu/noonfonts_2107@1.1/Pretendard-Light.woff') format('woff');
+ font-weight: 400;
+ font-style: normal;
+}
+
+@font-face {
+ font-family: 'Pretendard-Regular';
+ src: url('https://fastly.jsdelivr.net/gh/Project-Noonnu/noonfonts_2107@1.1/Pretendard-Regular.woff') format('woff');
+ font-weight: 400;
+ font-style: normal;
+}
+
+@font-face {
+ font-family: 'Pretendard-Bold';
+ src: url('https://fastly.jsdelivr.net/gh/Project-Noonnu/noonfonts_2107@1.1/Pretendard-SemiBold.woff') format('woff');
+ font-weight: 400;
+ font-style: normal;
+}
+
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
diff --git a/FE/src/main.jsx b/FE/src/main.jsx
new file mode 100644
index 0000000..4f47a9c
--- /dev/null
+++ b/FE/src/main.jsx
@@ -0,0 +1,21 @@
+import { StrictMode } from 'react';
+import { createRoot } from 'react-dom/client';
+import { BrowserRouter } from 'react-router-dom';
+
+import App from './App.jsx';
+import theme from './configs/theme';
+import './index.css';
+
+import { ThemeProvider } from '@mui/material/styles';
+import CssBaseline from '@mui/material/CssBaseline';
+
+createRoot(document.getElementById('root')).render(
+  <StrictMode>
+    <BrowserRouter>
+      <ThemeProvider theme={theme}>
+        {/* nesting reconstructed from the imports; provider order assumed */}
+        <CssBaseline />
+        <App />
+      </ThemeProvider>
+    </BrowserRouter>
+  </StrictMode>,
+);
diff --git a/FE/tailwind.config.js b/FE/tailwind.config.js
new file mode 100644
index 0000000..2051635
--- /dev/null
+++ b/FE/tailwind.config.js
@@ -0,0 +1,9 @@
+/** @type {import('tailwindcss').Config} */
+
+module.exports = {
+ content: ["./src/**/*.{js,jsx,ts,tsx}"],
+ theme: {
+ extend: {},
+ },
+ plugins: [require("daisyui")],
+};
diff --git a/FE/vite.config.js b/FE/vite.config.js
new file mode 100644
index 0000000..8b0f57b
--- /dev/null
+++ b/FE/vite.config.js
@@ -0,0 +1,7 @@
+import { defineConfig } from 'vite'
+import react from '@vitejs/plugin-react'
+
+// https://vite.dev/config/
+export default defineConfig({
+ plugins: [react()],
+})
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..0ad25db
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,661 @@
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published
+ by the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..9cf8224
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,39 @@
+clean: clean-pyc clean-test
+quality: set-style-dep check-quality
+style: set-style-dep set-style
+
+##### basic #####
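+# skip-worktree keeps local edits to config/config.yaml out of commits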
+set-git:
+ git config --local commit.template .gitmessage
+ git update-index --skip-worktree ./config/config.yaml
+
+set-style-dep:
+ pip3 install click==8.0.4 isort==5.13.2 black==24.8.0 flake8==7.1.1
+
+set-style:
+ black --config pyproject.toml .
+ isort --settings-path pyproject.toml .
+ flake8 . --max-line-length=120
+
+check-quality:
+ black --config pyproject.toml --check .
+ isort --settings-path pyproject.toml --check-only .
+
+##### clean #####
+clean-pyc:
+ find . -name '*.pyc' -exec rm -f {} +
+ find . -name '*.pyo' -exec rm -f {} +
+ find . -name '*~' -exec rm -f {} +
+ find . -name '__pycache__' -exec rm -fr {} +
+
+clean-test:
+ rm -f .coverage
+ rm -f .coverage.*
+ rm -rf .pytest_cache
+ rm -rf .mypy_cache
+
+clean-all: clean-pyc clean-test clean-build
+
+clean-build:
+ rm -rf build/
+ rm -rf dist/
\ No newline at end of file
diff --git a/PDF_OCR/README.MD b/PDF_OCR/README.MD
new file mode 100644
index 0000000..40275b7
--- /dev/null
+++ b/PDF_OCR/README.MD
@@ -0,0 +1,57 @@
+# PDF_OCR Pipeline
+
+## How to Run the Pipeline
+
+### Install packages
+``` bash
+pip install -r requirements.txt
+```
+
+### Run
+``` bash
+python pdf_parser.py -i "./pdf/input_pdf_folder" -r
+python data_postprocess.py
+```
+
+---
+
+## File Structure
+
+```
+PDF_OCR/
+├── config.py
+├── ocr_api.py
+├── pdf_parser.py
+├── ocr_processor.py
+├── table_converter.py
+├── data_postprocess.py
+├── requirements.txt
+├── README.MD
+├── pdf/
+│   └── input_pdf_folder/
+│       ├── pdf_file1.pdf
+│       ├── pdf_file2.pdf
+│       └── ...
+└── ocr_results/
+    └── input_pdf_folder/
+        └── pdf_file1/
+            ├── page_1/
+            │   ├── 1_plain text_3_result.json
+            │   └── ...
+            └── ...
+```
+---
+
+## Pipeline Overview
+
+1. Convert PDF files to images (PDF -> images)
+2. Extract layout bounding boxes from each image with DocLayout-YOLO (image -> bounding boxes)
+3. Run OCR (Clova OCR / Upstage Parser API) (bounding boxes -> OCR results (json))
+4. Post-process the OCR results (json -> json)
+    - 4.1. Attach a description to each chunk
+    - 4.2. For tables, query an LLM with the table HTML to produce the description
+        - 4.2.1. Tables are also saved as CSV (json -> csv)
+5. Aggregate the post-processed results (json -> json)
+6. Store the aggregated results in a vector DB (json -> vector DB)
+
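+Each chunk from step 4 is appended to the collected JSON file as one record. A sketch of the shape, taken from the example in `data_postprocess.py` (values illustrative):
+
+``` python
+{
+    "title": "FY24 operating profit",
+    "description": "summary text generated by the LLM",
+    "category": "table",
+    "company": "naver",
+    "securities": "hana",
+    "page": "1",
+    "date": "24.10.17",
+    "path": "/cation/naver/kybo/1017/1/1_plain text_3.png",
+}
+```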
diff --git a/PDF_OCR/__init__.py b/PDF_OCR/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/PDF_OCR/config.py b/PDF_OCR/config.py
new file mode 100644
index 0000000..623ef33
--- /dev/null
+++ b/PDF_OCR/config.py
@@ -0,0 +1,59 @@
+import os
+from pathlib import Path
+
+# Project root directory
+PROJECT_ROOT = Path(__file__).parent  # no .parent chain: the PDF_OCR directory itself is the root
+
+# Default settings
+DEFAULT_CONFIG = {
+    # Model settings
+ "MODEL": {
+ "path": os.path.expanduser(
+ "~/.cache/huggingface/hub/models--juliozhao--DocLayout-YOLO-DocStructBench/snapshots/8c3299a30b8ff29a1503c4431b035b93220f7b11/doclayout_yolo_docstructbench_imgsz1024.pt"
+ ),
+ # "path": "doclayout_yolo_docstructbench_imgsz1024.pt", # ๊ฐ๋จํ ๊ธฐ๋ณธ ๊ฒฝ๋ก
+ "imgsz": 1024,
+ "line_width": 5,
+ "font_size": 20,
+ "conf": 0.2,
+ "threshold": 0.05,
+ },
+    # Directory settings
+ "DIRS": {
+ "input_dir": str(PROJECT_ROOT / "pdf"), # PDF ํ์ผ์ด ์๋ ๋๋ ํ ๋ฆฌ
+ "output_dir": str(PROJECT_ROOT / "output"), # ์ค๊ฐ ๊ฒฐ๊ณผ๋ฌผ ์ ์ฅ ๋๋ ํ ๋ฆฌ
+ "database_dir": str(PROJECT_ROOT / "database"), # ๋ฐ์ดํฐ๋ฒ ์ด์ค ์ ์ฅ ๋๋ ํ ๋ฆฌ
+ "ocr_output_dir": str(PROJECT_ROOT / "ocr_results"), # OCR ๊ฒฐ๊ณผ ์ ์ฅ ๋๋ ํ ๋ฆฌ
+ },
+    # File names
+ "FILES": {
+ "database": "database.csv",
+ },
+}
+
+
+def get_config(custom_config=None):
+ """
+ ๊ธฐ๋ณธ ์ค์ ๊ฐ๊ณผ ์ฌ์ฉ์ ์ ์ ์ค์ ๊ฐ์ ๋ณํฉํ์ฌ ๋ฐํ
+
+ Args:
+ custom_config (dict, optional): ์ฌ์ฉ์ ์ ์ ์ค์ ๊ฐ
+
+ Returns:
+ dict: ์ต์ข ์ค์ ๊ฐ
+ """
+ config = DEFAULT_CONFIG.copy()
+
+ if custom_config:
+        # Merge nested dictionaries one level deep
+ for key, value in custom_config.items():
+ if isinstance(value, dict) and key in config:
+ config[key].update(value)
+ else:
+ config[key] = value
+
+    # Create all configured directories
+ for dir_path in config["DIRS"].values():
+ os.makedirs(dir_path, exist_ok=True)
+
+ return config
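+
+# Minimal usage sketch (assumed): override a single field and keep the rest.
+#   config = get_config({"MODEL": {"conf": 0.3}})
+#   print(config["DIRS"]["ocr_output_dir"])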
diff --git a/PDF_OCR/data_postprocess.py b/PDF_OCR/data_postprocess.py
new file mode 100644
index 0000000..f5dc096
--- /dev/null
+++ b/PDF_OCR/data_postprocess.py
@@ -0,0 +1,362 @@
+import json
+import os
+import sys
+import time
+import warnings
+
+import pandas as pd
+import requests
+from bs4 import BeautifulSoup
+from dotenv import load_dotenv
+
+load_dotenv()
+warnings.filterwarnings("ignore")
+
+"""
+ {//text 3
+ "title":"24๋ ์์ ์ด์ต",
+ "description":"{์๋ฌธ}",
+ "category":"text",
+ "company":"naver",
+ "securities":"hana",
+ "page":"1",
+ "date":"24.10.17",
+ "path":"/cation/naver/kybo/1017/1/1_plain text_3.png"
+ }
+ api๋ฅผ ์ด์ฉํด html์ queryํด html์ ๋ํ ์ค๋ช ์ description์ ๋ฃ์ด์ค๋ค.
+ ์ด๋ ๊ฒ ๋ง๋ค์ด์ง ๋ฐ์ดํฐ๋ฅผ ๋ชจ๋ ๋ชจ์์ ํ๋์ ํ์ผ๋ก ์ ์ฅํ๋ค.
+
+"""
+
+
+class MakeData:
+ def __init__(self):
+ self.base_folder = "ocr_results"
+ self.output_folder = "ocr_results"
+ self.error_cnt = 0
+        # Load previous results and failure logs; the output folder is created below
+ self.existing_data = self.load_existing_data()
+ self.failed_logs = self.load_failed_logs()
+
+ if not os.path.exists(self.output_folder):
+ os.makedirs(self.output_folder)
+
+ def load_existing_data(self):
+ try:
+ with open("new_data/All_data/table_data.json", "r", encoding="utf-8") as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return []
+
+ def load_failed_logs(self):
+ try:
+ with open("fail_logs.json", "r", encoding="utf-8") as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return []
+
+ def process_folders(self):
+ data = self.existing_data
+ try:
+            # First processing pass
+ self._process_all_folders(data)
+
+            # Retry cases that failed with a rate-limit error (up to three passes)
+ retry_count = 0
+ while retry_count < 3:
+ rate_limit_files = []
+ for log in self.failed_logs:
+                    if log.get("status_code") == "42901":  # rate-limit error code
+ rate_limit_files.append(log["file_path"])
+
+ if not rate_limit_files:
+ break
+
+ print(f"\n์ฌ์๋ {retry_count + 1}: Rate limit ์ค๋ฅ ํ์ผ {len(rate_limit_files)}๊ฐ ์ฌ์ฒ๋ฆฌ ์์")
+ time.sleep(60) # rate limit ํด์ ๋ฅผ ์ํด 1๋ถ ๋๊ธฐ
+
+ for file_path in rate_limit_files:
+ description = self.process_table_json_files(file_path)
+                    if description:  # processed successfully this time
+                        # Append the record for the now-successful file
+ path_parts = file_path.split(os.sep)
+ company = path_parts[1]
+ broker = path_parts[2]
+ page = path_parts[3]
+ broker_date = broker.split("_")[-1]
+ broker_name = broker_date.split("(")[0]
+ broker_date = broker_date.split("(")[1].replace(")", "")
+
+ data.append(
+ {
+ "title": "",
+ "description": description,
+ "category": "table",
+ "company": company,
+ "securities": broker_name,
+ "page": page,
+ "date": broker_date,
+ "path": file_path,
+ }
+ )
+                        # Drop the now-successful file from the failure log so the
+                        # next pass does not retry it again
+                        self.failed_logs = [log for log in self.failed_logs if log["file_path"] != file_path]
+
+                retry_count += 1
+
+            # Save the final collected data
+ with open("new_data/All_data/table_data.json", "w", encoding="utf-8") as f:
+ json.dump(data, f, ensure_ascii=False, indent=2)
+
+ except Exception as e:
+ print(f"์ ์ฒด ์ฒ๋ฆฌ ์ค ์ค๋ฅ ๋ฐ์: {e}")
+ self.save_failed_log("process_folders", str(e))
+
+ def _process_all_folders(self, data):
+        # The original process_folders logic was moved here
+ for company in os.listdir(self.base_folder):
+ company_path = os.path.join(self.base_folder, company)
+ if not os.path.isdir(company_path):
+ continue
+
+            # Create the per-company output folder
+ company_output = os.path.join(self.output_folder, company)
+ if not os.path.exists(company_output):
+ os.makedirs(company_output)
+
+            # Iterate over the per-broker folders
+ for broker in os.listdir(company_path):
+ broker_path = os.path.join(company_path, broker)
+ if not os.path.isdir(broker_path):
+ continue
+ print(f"์ฆ๊ถ์ฌ๋ณ ํด๋ ์ํ: {broker}")
+ # ์ฆ๊ถ์ฌ๋ณ ๊ฒฐ๊ณผ ํด๋ ์์ฑ
+ broker_output = os.path.join(company_output, broker)
+ if not os.path.exists(broker_output):
+ os.makedirs(broker_output)
+
+                # Iterate over the per-page folders
+ for page in os.listdir(broker_path):
+ page_path = os.path.join(broker_path, page)
+ if not os.path.isdir(page_path):
+ continue
+                    # Create the per-page output folder
+ page_output = os.path.join(broker_output, page)
+ if not os.path.exists(page_output):
+ os.makedirs(page_output)
+
+                    # Process the table JSON files (each contains the table's HTML)
+                    for file in os.listdir(page_path):
+                        if not file.lower().endswith(".json"):
+                            continue
+                        if "table" not in file:
+                            continue
+ description = self.process_table_json_files(os.path.join(page_path, file))
+ broker_date = broker.split("_")[-1]
+ broker_name = broker_date.split("(")[0]
+ broker_date = broker_date.split("(")[1].replace(")", "")
+ data_category = file.split("_")[1]
+ data.append(
+ {
+ "title": "",
+ "description": description,
+ "category": "table",
+ "company": company,
+ "securities": broker_name,
+ "page": page,
+ "date": broker_date,
+ "path": f"./ocr_results/{company}/{page}/{file}",
+ }
+ )
+
+    def process_table_json_files(self, input_path):
+        try:
+            with open(input_path, "r", encoding="utf-8-sig") as f:
+                json_data = json.load(f)
+                html = json_data["content"]["html"]
+
+            # Query the LLM API with the table HTML and store the generated
+            # explanation as the description; all records are later collected
+            # into a single file.
+
+ api_url = "https://clovastudio.stream.ntruss.com/testapp/v1/chat-completions/HCX-003"
+ studio_key = os.getenv("clova_studio_api_key")
+ request_id = os.getenv("clova_request_id")
+ headers = {
+ "Authorization": "Bearer " + studio_key,
+ "X-NCP-CLOVASTUDIO-REQUEST-ID": request_id,
+ "Content-Type": "application/json; charset=utf-8",
+ }
+            preset_text = [
+                {
+                    "role": "system",
+                    "content": (
+                        "The given HTML represents a table. Summarize in sentences the information of "
+                        "every item except the numeric values, including the sub-items.\n"
+                        "Example: This table provides financial information from 2022A to 2026F, such as "
+                        "revenue, cost of goods sold, gross profit, SG&A expenses, operating profit, ..."
+                    ),
+                },
+                {"role": "user", "content": html},
+                {"role": "assistant", "content": ""},
+            ]
+ request_data = {
+ "messages": preset_text,
+ "topP": 0.8,
+ "topK": 0,
+ "maxTokens": 400,
+ "temperature": 0.5,
+ "repeatPenalty": 5.0,
+ "stopBefore": [],
+ "includeAiFilters": True,
+ "seed": 0,
+ }
+            # Keep the request rate below 60 queries per minute
+ time.sleep(2)
+ response = requests.post(api_url, headers=headers, json=request_data)
+ response_json = response.json()
+ if response_json["status"]["code"] != "20000":
+ error_message = response_json["status"]["message"]
+ print(f"FAILED : {input_path} - {error_message}")
+ self.save_failed_log(input_path, error_message, response_json["status"]["code"])
+ return ""
+ else:
+                status_code = response_json["status"]["code"]
+                print(f"{input_path} SUCCESS : {status_code}")
+ return response_json["result"]["message"]["content"]
+
+        except Exception as e:
+            print(f"Error: {e}")
+ self.save_failed_log(input_path, str(e))
+ return ""
+
+ def save_failed_log(self, file_path, error_message, status_code=None):
+ log_entry = {
+ "file_path": file_path,
+ "error_message": error_message,
+ "status_code": status_code,
+ "timestamp": time.strftime("%Y-%m-%d %H:%M:%S"),
+ }
+ self.failed_logs.append(log_entry)
+ with open("fail_logs.json", "w", encoding="utf-8") as f:
+ json.dump(self.failed_logs, f, ensure_ascii=False, indent=2)
+
+
+class TextDataPostprocess:
+ def __init__(self):
+ self.base_folder = "ocr_results"
+ self.output_folder = "ocr_results"
+ self.existing_data = self.load_existing_data()
+ self.failed_logs = self.load_failed_logs()
+
+ if not os.path.exists(self.output_folder):
+ os.makedirs(self.output_folder)
+
+ def load_existing_data(self):
+ try:
+ with open("new_data/All_data/text_data.json", "r", encoding="utf-8") as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return []
+
+ def load_failed_logs(self):
+ try:
+ with open("fail_logs.json", "r", encoding="utf-8") as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return []
+
+ def process_folders(self):
+ data = []
+ for company in os.listdir(self.base_folder):
+ company_path = os.path.join(self.base_folder, company)
+ if not os.path.isdir(company_path):
+ continue
+
+            # Create the per-company output folder
+ company_output = os.path.join(self.output_folder, company)
+ if not os.path.exists(company_output):
+ os.makedirs(company_output)
+
+            # Iterate over broker folders
+ for broker in os.listdir(company_path):
+ broker_path = os.path.join(company_path, broker)
+ if not os.path.isdir(broker_path):
+ continue
+
+                # Create the per-broker output folder
+ broker_output = os.path.join(company_output, broker)
+ if not os.path.exists(broker_output):
+ os.makedirs(broker_output)
+
+                # Iterate over page folders
+ for page in os.listdir(broker_path):
+ page_path = os.path.join(broker_path, page)
+ if not os.path.isdir(page_path):
+ continue
+
+                    # Create the per-page output folder
+ page_output = os.path.join(broker_output, page)
+
+ if not os.path.exists(page_output):
+ os.makedirs(page_output)
+
+                    for file in os.listdir(page_path):
+                        if not file.lower().endswith(".json"):
+                            continue
+                        if "text" not in file:
+                            continue
+                        print(f"Processing text file: {file}")
+                        description = self.process_text_json_files(os.path.join(page_path, file))
+                        broker_date = broker.split("_")[-1]
+                        broker_name = broker_date.split("(")[0]
+                        broker_date = broker_date.split("(")[1].replace(")", "")
+ data.append(
+ {
+ "title": "",
+ "description": description,
+ "category": "text",
+ "company": company,
+ "securities": "All_data",
+ "page": page,
+ "date": "All_data",
+ "path": f"./ocr_results/{company}/{page}/{file}",
+ }
+ )
+ with open("new_data/All_data/text_data.json", "w", encoding="utf-8") as f:
+ json.dump(data, f, ensure_ascii=False, indent=2)
+
+    def process_text_json_files(self, input_path):
+        try:
+            with open(input_path, "r", encoding="utf-8") as f:
+                json_data = json.load(f)
+
+            # Walk the fields array of every image in the images array and collect inferText
+            all_text = []
+            for image in json_data.get("images", []):
+                for field in image.get("fields", []):
+                    if "inferText" in field:
+                        all_text.append(field["inferText"])
+
+            # Join all extracted text with spaces
+            return " ".join(all_text)
+
+        except Exception as e:
+            print(f"Error while processing text: {e}")
+            self.save_failed_log(input_path, str(e))
+            return ""
+
+    def save_failed_log(self, file_path, error_message, status_code=None):
+        # Mirrors the logging helper of the table-processing class above,
+        # which the except block relies on.
+        log_entry = {
+            "file_path": file_path,
+            "error_message": error_message,
+            "status_code": status_code,
+            "timestamp": time.strftime("%Y-%m-%d %H:%M:%S"),
+        }
+        self.failed_logs.append(log_entry)
+        with open("fail_logs.json", "w", encoding="utf-8") as f:
+            json.dump(self.failed_logs, f, ensure_ascii=False, indent=2)
+
+
+def main():
+
+ # processor = MakeData()
+ # processor.process_folders()
+
+ processor2 = TextDataPostprocess()
+ processor2.process_folders()
+ sys.exit()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/PDF_OCR/ocr_api.py b/PDF_OCR/ocr_api.py
new file mode 100644
index 0000000..12fec81
--- /dev/null
+++ b/PDF_OCR/ocr_api.py
@@ -0,0 +1,60 @@
+import json
+import os
+import time
+import uuid
+
+import requests
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
+def process_image_ocr(image_file, is_table=False):
+    """
+    Run OCR on an image file using the Naver Clova OCR API.
+
+    Request fields:
+        version: model version
+        requestId: unique request identifier
+        timestamp: request time (milliseconds)
+        enableTableDetection: whether to detect tables
+
+    Args:
+        image_file (str): Path of the image file to OCR.
+        is_table (bool): Whether to enable table detection.
+
+    Returns:
+        dict: OCR result.
+    """
+    api_url = os.getenv("clova_api_url")
+    secret_key = os.getenv("clova_secret_key")
+ request_json = {
+ "images": [{"format": "png", "name": "demo"}],
+ "requestId": str(uuid.uuid4()),
+ "version": "V2",
+ "timestamp": int(round(time.time() * 1000)),
+ "enableTableDetection": is_table,
+ }
+
+ payload = {"message": json.dumps(request_json).encode("UTF-8")}
+
+ with open(image_file, "rb") as f:
+ files = [("file", f)]
+ headers = {"X-OCR-SECRET": secret_key}
+ response = requests.request("POST", api_url, headers=headers, data=payload, files=files)
+
+ return response.json()
+
+
+def upstage_ocr(image_file):
+    """Run layout-aware OCR on an image file using the Upstage document-parse API."""
+    api_url = os.getenv("up_stage_url")
+    secret_key = os.getenv("up_stage_api_key")
+
+    with open(image_file, "rb") as f:
+        files = {"document": f}  # reuse the open handle instead of opening the file a second time
+        data = {"ocr": "force", "base64_encoding": "['table']", "model": "document-parse"}
+        headers = {"Authorization": f"Bearer {secret_key}"}
+        response = requests.request("POST", api_url, headers=headers, files=files, data=data)
+
+    return response.json()
diff --git a/PDF_OCR/ocr_processor.py b/PDF_OCR/ocr_processor.py
new file mode 100644
index 0000000..497ffeb
--- /dev/null
+++ b/PDF_OCR/ocr_processor.py
@@ -0,0 +1,104 @@
+import json
+import os
+
+import pandas as pd
+from ocr_api import process_image_ocr, upstage_ocr
+from table_converter import json_to_table
+
+
+class OCRProcessor:
+ def __init__(self, base_folder="pdf", output_folder="./ocr_results"):
+ self.base_folder = base_folder
+ self.output_folder = output_folder
+
+        # Create the output folder
+ if not os.path.exists(self.output_folder):
+ os.makedirs(self.output_folder)
+
+    def process_folders(self):
+        # Iterate over the folders named after each PDF file
+ for pdf_folder in os.listdir(self.base_folder):
+ pdf_path = os.path.join(self.base_folder, pdf_folder)
+ if not os.path.isdir(pdf_path):
+ continue
+
+            # Create the per-PDF output folder
+ pdf_output = os.path.join(self.output_folder, pdf_folder)
+ if not os.path.exists(pdf_output):
+ os.makedirs(pdf_output)
+
+            # Path to the images folder
+ images_path = os.path.join(pdf_path, "images")
+ if not os.path.exists(images_path):
+ continue
+
+            # Iterate over page folders
+ for page in os.listdir(images_path):
+ page_path = os.path.join(images_path, page)
+ if not os.path.isdir(page_path):
+ continue
+
+                # Path to the split_images folder
+ split_images_path = os.path.join(page_path, "split_images")
+ if not os.path.exists(split_images_path):
+ continue
+
+                # Create the per-page output folder
+ page_output = os.path.join(pdf_output, page)
+ if not os.path.exists(page_output):
+ os.makedirs(page_output)
+
+                # Run OCR on the image files
+ self.process_image_files(split_images_path, page_output)
+
+ def process_image_files(self, input_path, output_path):
+ for file in os.listdir(input_path):
+            # Only process files containing "plain text" or "table"
+            if not ("plain text" in file.lower() or "table" in file.lower()):
+                continue
+            # For tables, skip caption and footnote crops
+            if "table" in file.lower():
+                if "caption" in file.lower() or "footnote" in file.lower():
+                    continue
+ if not file.lower().endswith((".png", ".jpg", ".jpeg")):
+ continue
+
+ input_file = os.path.join(input_path, file)
+ output_base = os.path.join(output_path, os.path.splitext(file)[0])
+
+            try:
+                # Table crops go through the Upstage layout OCR
+                if "table" in file.lower():
+                    result = upstage_ocr(input_file)
+                    # Save the JSON result
+                    json_path = f"{output_base}_result.json"
+                    with open(json_path, "w", encoding="utf-8") as f:
+                        json.dump(result, f, indent=2, ensure_ascii=False)
+
+                    # Extract the table data and save it as CSV
+                    try:
+                        table_df = json_to_table(result)
+                        table_df.to_csv(f"{output_base}.csv", encoding="utf-8-sig")
+                    except Exception as e:
+                        print(f"Error while processing table ({file}): {str(e)}")
+
+                # Otherwise the crop is plain text
+                else:
+                    result = process_image_ocr(input_file, is_table=False)
+                    # Save only the JSON result
+                    with open(f"{output_base}_result.json", "w", encoding="utf-8") as f:
+                        json.dump(result, f, indent=2, ensure_ascii=False)
+
+                print(f"Finished: {file}")
+
+            except Exception as e:
+                print(f"Error while processing file ({file}): {str(e)}")
+
+
+def main():
+ processor = OCRProcessor()
+ processor.process_folders()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/PDF_OCR/pdf_parser.py b/PDF_OCR/pdf_parser.py
new file mode 100644
index 0000000..707085b
--- /dev/null
+++ b/PDF_OCR/pdf_parser.py
@@ -0,0 +1,659 @@
+from typing import Any, Callable, Dict, List, Tuple
+
+import argparse
+import os
+import re
+import shutil
+import sys
+from collections import defaultdict
+from functools import cmp_to_key
+from pathlib import Path
+
+import cv2
+import numpy as np
+import pandas as pd
+import torch
+from config import get_config
+from doclayout_yolo import YOLOv10
+from huggingface_hub import hf_hub_download  # used as a fallback when the local model file is missing
+from pdf2image import convert_from_path
+from tqdm import tqdm
+
+
+def pdf_to_image(pdf_path: str, save_path: str, db_path: str, verbose: bool = False) -> None:
+    """
+    Converts the given PDF file to images, moves the PDF file into the target
+    directory, and saves the converted images. Also records the conversion
+    metadata in the `database.csv` file.
+
+    Args:
+        pdf_path (str): Path of the PDF file to convert.
+        save_path (str): Folder in which to store the converted images and the PDF file.
+        db_path (str): Path to the database CSV.
+        verbose (bool, optional): Whether to print image-saving progress (default: False).
+
+    Returns:
+        None
+    """
+
+    # Company (stock) name
+    company_name = os.path.basename(save_path)
+
+    # Build the folder name from the PDF file name (extension removed)
+    file_name = os.path.splitext(os.path.basename(pdf_path))[0]
+
+    # Create the folder inside the current working directory
+    output_dir = os.path.join(save_path, file_name)
+    os.makedirs(output_dir, exist_ok=True)
+
+    # Move the PDF file into it
+    new_pdf_path = os.path.join(output_dir, os.path.basename(pdf_path))
+    shutil.move(pdf_path, new_pdf_path)
+
+    # Create the images folder
+    output_dir = os.path.join(output_dir, "images")
+    os.makedirs(output_dir, exist_ok=True)
+
+    # Convert the PDF to images
+    images = convert_from_path(new_pdf_path, dpi=300)
+
+    # Number of PDF pages
+    num_pages = len(images)
+
+    # Save each page as an image
+    for page_num, image in enumerate(images, start=1):
+        # Set the image file name
+        output_image_path = os.path.join(output_dir, f"page_{page_num}.png")
+
+        # Save the image
+        image.save(output_image_path, "PNG")
+        if verbose:
+            print(f"Page {page_num} saved as {output_image_path}")
+
+    # Record metadata for the file
+    new_data = pd.DataFrame(
+        {
+            "company_name": [company_name] * num_pages,
+            "file_name": [file_name] * num_pages,
+            "page": [i for i in range(1, num_pages + 1)],
+        }
+    )
+
+    # Update the database
+    if os.path.exists(db_path):
+        database = pd.read_csv(db_path, encoding="utf-8")
+    else:
+        database = pd.DataFrame(columns=["company_name", "file_name", "page"])
+
+    # Merge the two DataFrames with concat
+    database = pd.concat([database, new_data], ignore_index=True)
+
+    # Cast the 'page' column to int
+    database["page"] = database["page"].astype("int")
+
+    # Sort ascending by company_name -> file_name -> page
+    database = database.sort_values(by=["company_name", "file_name", "page"], ascending=[True, True, True])
+
+    # Save the database as CSV
+    database.to_csv(db_path, index=False, encoding="utf-8")
+
+
+def multi_pdf_to_image(root_dir: str, db_path: str) -> None:
+    """
+    Finds and converts every PDF file under the given root directory, including
+    all subdirectories. Each PDF file is processed into images by `pdf_to_image`.
+
+    Args:
+        root_dir (str): Root directory containing the PDF files.
+        db_path (str): Path to the database CSV.
+
+    Returns:
+        None
+    """
+
+    # Walk every subdirectory and file under the root directory
+    for dirpath, _, filenames in os.walk(root_dir):
+        for filename in filenames:
+            # Only process PDF files
+            if filename.lower().endswith(".pdf"):
+                pdf_path = os.path.join(dirpath, filename)
+                print(f"Converting {pdf_path} to images...")
+
+                # Save images while keeping the same directory structure
+                pdf_to_image(pdf_path, dirpath, db_path=db_path, verbose=False)
+
+
+def sort_bounding_boxes(output_data, image_width):
+ def get_columns(data, image_width, threshold_x=0.085, threshold_diff=1, threshold_column=0.1):
+ """
+ Group bounding boxes into columns based on their x_min values.
+ """
+        # Sort the x_min values
+        x_mins = np.array([item["coordinates"][0] for item in data])
+        sorted_x = np.sort(x_mins)
+
+        # List to hold the groups
+        grouped = []
+
+        # Initialize the first group with the first value
+        current_group = [sorted_x[0]]
+
+        # Iterate over the sorted values
+        for i in range(1, len(sorted_x)):
+            if abs(sorted_x[i] - current_group[-1]) <= image_width * threshold_x:
+                # Values within the threshold join the current group
+                current_group.append(sorted_x[i])
+            else:
+                # Close the current group and start a new one
+                grouped.append(current_group)
+                current_group = [sorted_x[i]]
+
+        # Append the last group
+        grouped.append(current_group)
+
+        grouped_count = list(map(len, grouped))
+        # 1. Sort grouped_count ascending (keeping track of the original indices)
+        sorted_indices = np.argsort(grouped_count)  # sorted indices
+        sorted_grouped_count = [grouped_count[i] for i in sorted_indices]  # sorted grouped_count
+
+        # 2. Compute the differences
+        diffs = np.diff(sorted_grouped_count)
+
+        # 3. Find the points where the difference jumps by at least the threshold
+        sudden_increase_indices = np.where(diffs >= threshold_diff)[0] + 1  # +1 because diff has length n-1
+
+        if len(sudden_increase_indices) != 0:
+            # 4. Recover the original indices after the sudden jump
+            original_indices = sorted_indices[sudden_increase_indices[0] :]
+            mode_components_list = [grouped[i] for i in original_indices]
+            x_column_boundary = [min(mode_components) for mode_components in mode_components_list]
+            x_column_boundary.sort()
+            column_bounds = [(0, x_column_boundary[0])]
+            for i in range(len(x_column_boundary) - 1):
+                column_bounds.append((x_column_boundary[i], x_column_boundary[i + 1]))
+            column_bounds.append((x_column_boundary[-1], float("inf")))
+        else:
+            # The page may be split into columns while each whole column is detected
+            # as one large bounding box. If the group sizes tie, analyze the gaps
+            # between x_min values and treat a sudden jump in gap size as a column boundary.
+ gaps = np.diff(sorted_x)
+ column_threshold = threshold_column * (sorted_x[-1] - sorted_x[0])
+ column_indices = np.where(gaps > column_threshold)[0]
+
+ columns = []
+ start = 0
+ for idx in column_indices:
+ columns.append(sorted_x[start : idx + 1])
+ start = idx + 1
+ columns.append(sorted_x[start:])
+
+ column_bounds = [[col.min(), col.max()] for col in map(np.array, columns)]
+ column_bounds.insert(0, (0, column_bounds[0][0]))
+ for i in range(1, len(column_bounds) - 1):
+ column_bounds[i][1] = column_bounds[i + 1][0]
+ column_bounds.append((column_bounds[-1][1], float("inf")))
+ return column_bounds
+
+    def assign_column(box, column_bounds):
+        """Assign a bounding box to its column."""
+        x_min = box["coordinates"][0]  # the box's x_min value
+        for idx, (col_min, col_max) in enumerate(column_bounds):  # check each column boundary
+            if col_min <= x_min < col_max:  # x_min lies within this column
+                return idx  # return that column's index
+        return len(column_bounds)  # not inside any column: return the last index
+
+    def fuzzy_comparator(box1, box2):
+        # Extract the x_min, y_min coordinates of both boxes
+        x1, y1, _, _ = box1["coordinates"]
+        x2, y2, _, _ = box2["coordinates"]
+
+        y_threshold = 32
+
+        # If the y coordinates are close, compare by x
+        if abs(y1 - y2) <= y_threshold:
+            return x1 - x2
+        # Otherwise compare by y
+        return y1 - y2
+
+ def sort_within_column(boxes):
+ """Sort boxes within a column by y_min, then x_min."""
+ return sorted(boxes, key=cmp_to_key(fuzzy_comparator))
+ # return sorted(boxes, key=lambda b: (b['coordinates'][1], b['coordinates'][0]))
+
+ # Step 1: Detect columns based on x_min values
+ column_bounds = get_columns(output_data, image_width)
+ if not column_bounds:
+ tolerance = 0.05
+ sorted_boxes = sorted(
+ output_data, key=lambda b: ((b["coordinates"][1] // tolerance) * tolerance, b["coordinates"][0])
+ )
+ return sorted_boxes
+ else:
+ column_data = defaultdict(list)
+
+ for box in output_data:
+ column_idx = assign_column(box, column_bounds)
+ column_data[column_idx].append(box)
+
+ # Step 2: Sort columns based on width (wide to narrow or left to right if similar)
+ sorted_columns = sorted(
+ column_data.items(),
+ key=lambda c: (
+ -(max(box["coordinates"][2] for box in c[1]) - min(box["coordinates"][0] for box in c[1])),
+ c[0],
+ ),
+ )
+
+ # Step 3: Sort boxes within each column
+ sorted_boxes = []
+ for _, boxes in sorted_columns:
+ sorted_boxes.extend(sort_within_column(boxes))
+
+ return sorted_boxes
+
+
+def extract_and_save_bounding_boxes(
+ image_path,
+ database_path,
+ model_path="~/.cache/huggingface/hub/models--juliozhao--DocLayout-YOLO-DocStructBench/snapshots/8c3299a30b8ff29a1503c4431b035b93220f7b11/doclayout_yolo_docstructbench_imgsz1024.pt",
+ res_path="outputs",
+ imgsz=1024,
+ line_width=5,
+ font_size=20,
+ conf=0.2,
+    split_images_folder_name="split_images",
+ threshold=0.05,
+ verbose=False,
+):
+ # Automatically select device
+ device = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
+
+ try:
+ model = YOLOv10(model_path)
+
+    except Exception as e:
+        print(f"Could not load the model from the given path: {e}")
+        print("Downloading the model from Hugging Face...")
+        try:
+            model_path = hf_hub_download(
+                repo_id="juliozhao/DocLayout-YOLO-DocStructBench", filename="doclayout_yolo_docstructbench_imgsz1024.pt"
+            )
+            model = YOLOv10(model_path)
+            print(f"Model downloaded successfully: {model_path}")
+        except Exception as e:
+            raise Exception(f"Model download failed: {e}")
+
+ det_res = model.predict(
+ image_path,
+ imgsz=imgsz,
+ conf=conf,
+ device=device,
+ )
+ annotated_frame = det_res[0].plot(pil=True, line_width=line_width, font_size=font_size)
+ if not os.path.exists(res_path):
+ os.makedirs(res_path)
+    output_path = os.path.join(res_path, os.path.basename(image_path).replace(".png", "_annotated.png"))
+ cv2.imwrite(output_path, annotated_frame)
+ print(f'The result was saved to "{output_path}"')
+
+    # Map class IDs to names
+ CLASS_LABELS = {
+ 0: "title",
+ 1: "plain text",
+ 2: "abandon",
+ 3: "figure",
+ 4: "figure_caption",
+ 5: "table",
+ 6: "table_caption",
+ 7: "table_footnote",
+ 8: "isolate_formula",
+ 9: "formula_caption",
+ }
+
+ image = cv2.imread(image_path)
+
+    # Create the directory for the split images
+    output_dir = os.path.join(res_path, split_images_folder_name)
+    print(f'Split images will be saved to "{output_dir}"')
+    os.makedirs(output_dir, exist_ok=True)
+
+    # Track a save index per class
+    class_indices = defaultdict(int)  # per-class save index
+
+    output_data = []
+    unique_boxes = {}  # deduplicates boxes, keeping the highest-confidence one
+
+    for box in det_res[0].boxes.data:
+        # Extract the bounding box information
+        x_min, y_min, x_max, y_max = map(int, box[:4].cpu().numpy())  # coordinates
+        confidence = box[4].cpu().numpy()  # confidence score
+        class_id = int(box[5].cpu().numpy())  # class ID
+        class_name = CLASS_LABELS.get(class_id, "Unknown")  # map ID to class name
+
+        # Deduplicate by coordinates, keeping the highest-confidence box
+        box_tuple = (x_min, y_min, x_max, y_max)
+
+        # Check the new box against the existing boxes for overlap
+        overlap_found = False
+ overlap_found = False
+ for existing_key, existing_box in list(unique_boxes.items()):
+ existing_coordinates = existing_box["coordinates"]
+
+ x_min1, y_min1, x_max1, y_max1 = x_min, y_min, x_max, y_max
+ x_min2, y_min2, x_max2, y_max2 = existing_coordinates
+
+            # Coordinates of the intersection region
+            x_min_inter = max(x_min1, x_min2)
+            y_min_inter = max(y_min1, y_min2)
+            x_max_inter = min(x_max1, x_max2)
+            y_max_inter = min(y_max1, y_max2)
+
+            # Intersection area
+            if x_max_inter - x_min_inter > 0 and y_max_inter - y_min_inter > 0:
+                intersection_area = (x_max_inter - x_min_inter) * (y_max_inter - y_min_inter)
+            else:
+                intersection_area = 0
+
+            # Areas of the two boxes
+            area1 = (x_max1 - x_min1) * (y_max1 - y_min1)
+            area2 = (x_max2 - x_min2) * (y_max2 - y_min2)
+
+            if area1 - intersection_area < threshold * area1 and area2 - intersection_area < threshold * area2:
+                # The two boxes nearly coincide: keep the higher-confidence one
+ if confidence > existing_box["confidence"]:
+ del unique_boxes[existing_key]
+ if box_tuple not in unique_boxes.keys():
+ unique_boxes[box_tuple] = {
+ "class_name": class_name,
+ "confidence": confidence,
+ "coordinates": [x_min, y_min, x_max, y_max],
+ }
+ overlap_found = True
+            elif area1 < area2 and area1 - intersection_area < threshold * area1:
+                # The current box is smaller: remove the existing (larger) box
+ del unique_boxes[existing_key]
+ unique_boxes[box_tuple] = {
+ "class_name": class_name,
+ "confidence": confidence,
+ "coordinates": [x_min, y_min, x_max, y_max],
+ }
+ overlap_found = True
+            elif area2 < area1 and area2 - intersection_area < threshold * area2:
+                # The existing box is smaller: do not add the current box
+ overlap_found = True
+
+        # No overlap found: add the new box
+ if not overlap_found:
+ unique_boxes[box_tuple] = {
+ "class_name": class_name,
+ "confidence": confidence,
+ "coordinates": [x_min, y_min, x_max, y_max],
+ }
+
+ print("num_split_images: {num_split_images}".format(num_split_images=len(unique_boxes)))
+
+    # Save the results and crop the images
+    for _, box_info in unique_boxes.items():
+        x_min, y_min, x_max, y_max = box_info["coordinates"]
+        class_name = box_info["class_name"]
+        confidence = box_info["confidence"]
+
+        # Advance the per-class index
+        class_index = class_indices[class_name] + 1
+        class_indices[class_name] += 1
+
+        # Store the component info
+        output_data.append(
+ {
+ "box_id": class_index,
+ "class_name": class_name,
+ "confidence": float(confidence),
+ "coordinates": [x_min, y_min, x_max, y_max],
+ }
+ )
+
+    # Build metadata for this page
+    dir_path = os.path.dirname(image_path)
+    path_parts = dir_path.split("/")
+    company_name = path_parts[-3]
+    file_name = path_parts[-2]
+    page = os.path.splitext(os.path.basename(image_path))[0]
+    page = int(page.split("_")[-1])
+
+    # Sort output_data into reading order: top to bottom within a column,
+    # then top to bottom in the next column
+    output_data = sort_bounding_boxes(output_data, image.shape[1])
+
+    # Optionally inspect the sorted data
+    if verbose:
+        for data in output_data:
+            print(data)
+
+    # Record metadata for the file
+ num_page_components = len(unique_boxes)
+ new_data = pd.DataFrame(
+ {
+ "company_name": [company_name] * num_page_components,
+ "file_name": [file_name] * num_page_components,
+ "page": [page] * num_page_components,
+ "component_index": [i for i in range(1, len(output_data) + 1)],
+ "component_type": [component["class_name"] for component in output_data],
+ "component_type_sub_index": [component["box_id"] for component in output_data],
+ "coordinates-x_min,y_min,x_max,y_max": [
+ component["coordinates"] for component in output_data
+ ], # left, top, right, bottom
+ "component_type_confidence": [component["confidence"] for component in output_data],
+ }
+ )
+
+    # Assign a separate 'box_id' within each component_type
+ new_data["component_type_sub_index"] = new_data.groupby("component_type").cumcount() + 1
+ new_data["component_index"] = range(1, len(new_data) + 1)
+
+    for _, row in new_data.iterrows():
+        # Crop the box region
+        x_min, y_min, x_max, y_max = row["coordinates-x_min,y_min,x_max,y_max"]
+        cropped_image = image[y_min:y_max, x_min:x_max]
+
+        # Save the cropped image
+        cropped_image_path = os.path.join(
+            output_dir, f"{row['component_index']}_{row['component_type']}_{row['component_type_sub_index']}.png"
+        )
+        cv2.imwrite(cropped_image_path, cropped_image)
+ database = pd.read_csv(database_path, encoding="utf-8")
+
+    # Find the index of the row matching this image
+    matching_indices = database.loc[
+        (database["company_name"] == company_name) & (database["file_name"] == file_name) & (database["page"] == page)
+    ].index
+    matching_indices = matching_indices[0]
+
+    # Replace the single existing row for this company_name/file_name/page with new_data:
+    # the page image was split into several components, so one row becomes several rows
+ database = pd.concat(
+ [database.iloc[:matching_indices], new_data, database.iloc[matching_indices + 1 :]]
+ ).reset_index(drop=True)
+
+    # Save the database as CSV
+ database.to_csv(database_path, index=False, encoding="utf-8")
+
+ print(f"{company_name}|{file_name}|{page} conversion completed.\n")
+
+ return det_res, output_data
+
+
+def multi_extract_and_save_bounding_boxes(
+ root_dir: str,
+ extract_and_save_bounding_boxes: Callable[
+ [str, str, str, str, int, int, int, float, str, float, bool], Tuple[Dict, List]
+ ],
+ **kwargs: Any,
+) -> None:
+    """
+    Processes image files of a specific format under the root folder and saves
+    the results in a subfolder named after each image (extension removed).
+
+    The function identifies image files whose names match the "page_<number>"
+    pattern, then processes each one with the given extract_and_save_bounding_boxes.
+
+    Args:
+        root_dir (str): Root folder containing the image files.
+        extract_and_save_bounding_boxes (Callable[..., None]):
+            Function that processes a single image. It must accept:
+            - image_path (str): Path of the image file to process.
+            - res_path (str): Folder in which to store the processed results.
+            - Additional keyword arguments (**kwargs).
+        **kwargs (Any): Extra parameters forwarded to extract_and_save_bounding_boxes.
+
+    Returns:
+        None
+    """
+    # Supported image extensions
+    valid_extensions = (".jpg", ".jpeg", ".png", ".bmp", ".tiff")
+
+    # Regex: the file name must look like page_<number>
+    page_pattern = re.compile(r"^page_\d+$")
+
+    # Collect matching image files under the root folder
+ all_image_paths = [
+ os.path.join(dp, f)
+ for dp, dn, filenames in os.walk(root_dir)
+ for f in filenames
+ if f.lower().endswith(valid_extensions) and page_pattern.match(os.path.splitext(f)[0])
+ ]
+
+    for image_path in tqdm(all_image_paths, desc="Processing Images", unit="image"):
+        # Folder containing the current image file
+        current_folder = os.path.dirname(image_path)
+
+        # Output folder named after the image file with its extension removed
+        image_name = os.path.splitext(os.path.basename(image_path))[0]  # e.g. page_1
+        output_folder = os.path.join(current_folder, image_name)
+
+        # Delete the output folder if it already exists
+        if os.path.exists(output_folder):
+            print("The output folder already exists. It will be deleted and recreated.")
+            shutil.rmtree(output_folder)
+
+        # Create the output folder
+        os.makedirs(output_folder, exist_ok=True)
+
+        # Call extract_and_save_bounding_boxes
+        try:
+            extract_and_save_bounding_boxes(image_path=image_path, res_path=output_folder, **kwargs)
+ print("Save completed")
+ except Exception as e:
+ print(f"An error occurred while processing {image_path}: {e}")
+
+ print("All images have been processed successfully.")
+
+
+def pdf_parsing_pipeline(config=None) -> None:
+
+    # Configuration
+    cfg = get_config(config)
+
+    # Paths
+    root_dir = cfg["DIRS"]["input_dir"]
+    db_path = os.path.join(cfg["DIRS"]["database_dir"], cfg["FILES"]["database"])
+    ocr_output_dir = cfg["DIRS"]["ocr_output_dir"]
+
+    # PDF -> image conversion
+    multi_pdf_to_image(root_dir=root_dir, db_path=db_path)
+
+    # Back up the database
+    shutil.copy(db_path, db_path.replace(".csv", "_temp.csv"))
+
+    # Image -> bounding box extraction
+ multi_extract_and_save_bounding_boxes(
+ root_dir=root_dir,
+ extract_and_save_bounding_boxes=extract_and_save_bounding_boxes,
+ database_path=db_path,
+ model_path=cfg["MODEL"]["path"],
+ imgsz=cfg["MODEL"]["imgsz"],
+ line_width=cfg["MODEL"]["line_width"],
+ font_size=cfg["MODEL"]["font_size"],
+        split_images_folder_name="split_images",
+ conf=cfg["MODEL"]["conf"],
+ threshold=cfg["MODEL"]["threshold"],
+ verbose=False,
+ )
+
+    # OCR processing
+ from ocr_processor import OCRProcessor
+
+ processor = OCRProcessor(base_folder=root_dir, output_folder=ocr_output_dir)
+ processor.process_folders()
+
+    print("The full pipeline has finished.")
+
+
+def parse_args():
+    """
+    Parse the command-line arguments.
+    """
+    parser = argparse.ArgumentParser(description="Pipeline that converts PDF files and runs OCR on them")
+
+    parser.add_argument("--input", "-i", type=str, help="Input PDF file, or a directory containing PDF files")
+
+    parser.add_argument(
+        "--output-dir", "-o", type=str, default=None, help="Directory for the outputs (default: project root directory)"
+    )
+
+    parser.add_argument("--recursive", "-r", action="store_true", help="Also process subdirectories when a directory is given")
+
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+ args = parse_args()
+
+    # Handle the input path
+    input_path = Path(args.input).resolve() if args.input else None
+    if not input_path or not input_path.exists():
+        raise ValueError(f"Invalid input path: {args.input}")
+
+    # Set the output directory
+ if args.output_dir:
+ output_base = Path(args.output_dir).resolve()
+ else:
+ output_base = Path(__file__).parent
+
+    # Build the configuration
+ custom_config = {
+ "DIRS": {
+ "input_dir": str(input_path.parent if input_path.is_file() else input_path),
+ "output_dir": str(output_base / "output"),
+ "database_dir": str(output_base / "database"),
+ "ocr_output_dir": str(output_base / "ocr_results"),
+ }
+ }
+
+    # Process the PDF input
+    if input_path.is_file() and input_path.suffix.lower() == ".pdf":
+        # Single PDF file
+        if not input_path.parent.samefile(Path(custom_config["DIRS"]["input_dir"])):
+            # Copy the PDF into the input directory
+            os.makedirs(custom_config["DIRS"]["input_dir"], exist_ok=True)
+            shutil.copy2(input_path, Path(custom_config["DIRS"]["input_dir"]) / input_path.name)
+
+    elif input_path.is_dir():
+        # Copy files only when the input directory differs from the processing directory
+        if not input_path.samefile(Path(custom_config["DIRS"]["input_dir"])):
+            # Create the directory
+            os.makedirs(custom_config["DIRS"]["input_dir"], exist_ok=True)
+
+            # Copy the PDF files
+            if args.recursive:
+                # Recursively copy every PDF file
+                for pdf_file in input_path.rglob("*.pdf"):
+                    relative_path = pdf_file.relative_to(input_path)
+                    target_path = Path(custom_config["DIRS"]["input_dir"]) / relative_path
+                    os.makedirs(target_path.parent, exist_ok=True)
+                    shutil.copy2(pdf_file, target_path)
+            else:
+                # Copy only the PDFs in the top-level directory
+                for pdf_file in input_path.glob("*.pdf"):
+                    target_path = Path(custom_config["DIRS"]["input_dir"]) / pdf_file.name
+                    os.makedirs(target_path.parent, exist_ok=True)
+                    shutil.copy2(pdf_file, target_path)
+
+    # Run the pipeline
+ pdf_parsing_pipeline(custom_config)
+ sys.exit(0)
diff --git a/PDF_OCR/requirements.txt b/PDF_OCR/requirements.txt
new file mode 100644
index 0000000..c0ae835
--- /dev/null
+++ b/PDF_OCR/requirements.txt
@@ -0,0 +1,4 @@
+torch==2.5.1
+torchvision==0.20.0
+doclayout-yolo==0.0.2
+pdf2image==1.16.1
diff --git a/PDF_OCR/table_converter.py b/PDF_OCR/table_converter.py
new file mode 100644
index 0000000..20b0e71
--- /dev/null
+++ b/PDF_OCR/table_converter.py
@@ -0,0 +1,97 @@
+from typing import Dict, List, Union
+
+import json
+import os
+import warnings
+from pathlib import Path
+
+import pandas as pd
+from bs4 import BeautifulSoup
+
+warnings.filterwarnings("ignore")
+
+
+def json_to_table(json_data: Union[str, Dict]) -> pd.DataFrame:
+    """Extract the HTML table from a table-OCR JSON result and return it as a DataFrame."""
+    # Load the JSON data
+ if isinstance(json_data, str):
+ with open(json_data, "r", encoding="utf-8") as f:
+ data = json.load(f)
+ else:
+ data = json_data
+
+    try:
+        html = data["content"]["html"]
+
+        # Parse the HTML with BeautifulSoup
+        soup = BeautifulSoup(html, "html.parser")
+
+        # Extract the first table from the HTML
+        df = pd.read_html(str(soup))[0]
+
+        return df
+
+    except Exception as e:
+        print(f"Error while converting table data: {str(e)}")
+        raise
+
+
+def convert_json_to_csv(
+ input_path: Union[str, Path], output_path: Union[str, Path] = None, recursive: bool = False
+) -> None:
+    """
+    Convert table-OCR JSON result files to CSV.
+
+    Args:
+        input_path (Union[str, Path]): Path of a JSON file or a directory.
+        output_path (Union[str, Path], optional): Output path. If omitted,
+            the CSV is saved next to the input file.
+        recursive (bool, optional): When given a directory, whether to also
+            process its subdirectories.
+    """
+ input_path = Path(input_path)
+
+ if output_path:
+ output_path = Path(output_path)
+ if not output_path.exists():
+ output_path.mkdir(parents=True)
+
+    def process_file(json_path: Path) -> None:
+        try:
+            # Only process JSON files that contain table OCR results
+            if not json_path.stem.endswith("_result"):
+                return
+
+            # Choose the output path
+            if output_path:
+                csv_path = output_path / f"{json_path.stem.replace('_result', '')}.csv"
+            else:
+                csv_path = json_path.parent / f"{json_path.stem.replace('_result', '')}.csv"
+
+            # Convert and save
+            table_df = json_to_table(str(json_path))
+            table_df.to_csv(csv_path, encoding="utf-8-sig")
+            print(f"Converted: {csv_path}")
+
+        except Exception as e:
+            print(f"Error while processing file ({json_path.name}): {str(e)}")
+
+    # Single file
+    if input_path.is_file():
+        process_file(input_path)
+        return
+
+    # Directory
+    if recursive:
+        json_files = input_path.rglob("*.json")
+    else:
+        json_files = input_path.glob("*.json")
+
+ for json_file in json_files:
+ process_file(json_file)
+
+
+def main():
+ convert_json_to_csv("../../PDF_OCR/new_data/All_data/table.json")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..0e75f9b
--- /dev/null
+++ b/README.md
@@ -0,0 +1,231 @@
+# Level 4. Stock LLM Service Based on Securities Firm Reports
+
+# **Project Overview**
+
+### **Project Topic**
+
+1. Topic
+    - Develop a stock LLM service based on securities firm research reports
+2. Requirements
+    - Extract text, charts, and other information from PDF documents
+    - Build a data repository (GraphDB, VectorDB, etc.)
+    - Implement a RAG system that retrieves the data most relevant to a query
+    - Prompt development
+    - Answer generation
+    - Q&A feature: used for quantitative evaluation
+    - Implemented as a REST API (see the sketch below)
+        - Input: query (the question)
+        - Output: context (referenced text), answer (the generated answer)
+
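+A minimal sketch of that request/response contract is shown below. FastAPI, the `/qna` path, and the stub `retrieve`/`generate` helpers are illustrative assumptions, not the project's actual implementation:
+
+```python
+from fastapi import FastAPI
+from pydantic import BaseModel
+
+app = FastAPI()
+
+
+class Query(BaseModel):
+    query: str
+
+
+def retrieve(query: str) -> str:
+    # placeholder for the RAG retriever over the report repository
+    return "most relevant passage for the query"
+
+
+def generate(query: str, context: str) -> str:
+    # placeholder for the LLM call that grounds the answer in the retrieved context
+    return f"answer to {query!r} based on the context"
+
+
+@app.post("/qna")
+def qna(body: Query) -> dict:
+    context = retrieve(body.query)
+    answer = generate(body.query, context)
+    return {"context": context, "answer": answer}
+```
+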
+### **Dataset**
+
+1. Provided data
+    - 100 securities firm report files (PDF)
+
+### **Evaluation Method**
+
+1. Quantitative evaluation (50%)
+    - Answer quality on test queries, measured with the G-Eval metric (see the sketch below)
+2. Qualitative evaluation (50%)
+    - Creativity and usefulness of the service, development completeness, source code quality, and level of documentation
+
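+G-Eval-style scoring prompts an evaluator LLM with a rubric and parses out a numeric rating. The sketch below is a simplified illustration (the original G-Eval also weights scores by token probabilities), and `call_llm` is a hypothetical stand-in for whichever chat-completion API is used:
+
+```python
+def call_llm(prompt: str) -> str:
+    # hypothetical stand-in for the evaluator LLM call
+    raise NotImplementedError("wire this to an evaluator LLM")
+
+
+def g_eval_score(question: str, reference: str, answer: str) -> int:
+    prompt = (
+        "You are grading an answer from a stock Q&A service.\n"
+        "Criteria: factual consistency with the reference, relevance, completeness.\n"
+        f"Question: {question}\n"
+        f"Reference: {reference}\n"
+        f"Answer: {answer}\n"
+        "Think step by step, then output only an integer score from 1 to 5."
+    )
+    reply = call_llm(prompt)
+    return int(reply.strip().split()[-1])  # take the final token as the score
+```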
+
+
+# **👨🏻‍💻 Team Members and Roles**
+
+