diff --git a/.github/.github/COMMIT_CONVENTION.md b/.github/.github/COMMIT_CONVENTION.md new file mode 100644 index 000000000..076ff9cfe --- /dev/null +++ b/.github/.github/COMMIT_CONVENTION.md @@ -0,0 +1,130 @@ +## Git Commit Message Convention + +> This is adapted from [Conventional Commits 1.0.0](https://www.conventionalcommits.org/en/v1.0.0/). + +## Summary + +The Conventional Commits specification is a lightweight convention on top of commit messages. +It provides an easy set of rules for creating an explicit commit history; +which makes it easier to write automated tools on top of. +This convention dovetails with [SemVer](http://semver.org), +by describing the features, fixes, and breaking changes made in commit messages. + +The commit message should be structured as follows: + +--- + +``` +<type>[optional scope]: <description> + +[optional body] + +[optional footer(s)] +``` +--- + +
+The commit contains the following structural elements, to communicate intent to the +consumers of your library: + +1. **fix:** a commit of the _type_ `fix` patches a bug in your codebase (this correlates with [`PATCH`](http://semver.org/#summary) in Semantic Versioning). +1. **feat:** a commit of the _type_ `feat` introduces a new feature to the codebase (this correlates with [`MINOR`](http://semver.org/#summary) in Semantic Versioning). +1. **BREAKING CHANGE:** a commit that has a footer `BREAKING CHANGE:`, or appends a `!` after the type/scope, introduces a breaking API change (correlating with [`MAJOR`](http://semver.org/#summary) in Semantic Versioning). +A BREAKING CHANGE can be part of commits of any _type_. +1. _types_ other than `fix:` and `feat:` are allowed, for example [@commitlint/config-conventional](https://github.com/conventional-changelog/commitlint/tree/master/%40commitlint/config-conventional) (based on [the Angular convention](https://github.com/angular/angular/blob/22b96b9/CONTRIBUTING.md#-commit-message-guidelines)) recommends `build:`, `chore:`, + `ci:`, `docs:`, `style:`, `refactor:`, `perf:`, `test:`, and others. +1. _footers_ other than `BREAKING CHANGE: <description>` may be provided and follow a convention similar to + [git trailer format](https://git-scm.com/docs/git-interpret-trailers). + +Additional types are not mandated by the Conventional Commits specification, and have no implicit effect in Semantic Versioning (unless they include a BREAKING CHANGE). +

+A scope may be provided to a commit's type, to provide additional contextual information and is contained within parenthesis, e.g., `feat(parser): add ability to parse arrays`. + +## Examples + +### Commit message with description and breaking change footer +``` +feat: allow provided config object to extend other configs + +BREAKING CHANGE: `extends` key in config file is now used for extending other config files +``` + +### Commit message with `!` to draw attention to breaking change +``` +feat!: send an email to the customer when a product is shipped +``` + +### Commit message with scope and `!` to draw attention to breaking change +``` +feat(api)!: send an email to the customer when a product is shipped +``` + +### Commit message with both `!` and BREAKING CHANGE footer +``` +chore!: drop support for Node 6 + +BREAKING CHANGE: use JavaScript features not available in Node 6. +``` + +### Commit message with no body +``` +docs: correct spelling of CHANGELOG +``` + +### Commit message with scope +``` +feat(lang): add polish language +``` + +### Commit message with multi-paragraph body and multiple footers +``` +fix: prevent racing of requests + +Introduce a request id and a reference to latest request. Dismiss +incoming responses other than from latest request. + +Remove timeouts which were used to mitigate the racing issue but are +obsolete now. + +Reviewed-by: Z +Refs: #123 +``` + +## Specification + +The key words “MUST”, “MUST NOT”, “REQUIRED”, “SHALL”, “SHALL NOT”, “SHOULD”, “SHOULD NOT”, “RECOMMENDED”, “MAY”, and “OPTIONAL” in this document are to be interpreted as described in [RFC 2119](https://www.ietf.org/rfc/rfc2119.txt). + +1. Commits MUST be prefixed with a type, which consists of a noun, `feat`, `fix`, etc., followed + by the OPTIONAL scope, OPTIONAL `!`, and REQUIRED terminal colon and space. +1. The type `feat` MUST be used when a commit adds a new feature to your application or library. +1. The type `fix` MUST be used when a commit represents a bug fix for your application. +1. A scope MAY be provided after a type. A scope MUST consist of a noun describing a + section of the codebase surrounded by parenthesis, e.g., `fix(parser):` +1. A description MUST immediately follow the colon and space after the type/scope prefix. +The description is a short summary of the code changes, e.g., _fix: array parsing issue when multiple spaces were contained in string_. +1. A longer commit body MAY be provided after the short description, providing additional contextual information about the code changes. The body MUST begin one blank line after the description. +1. A commit body is free-form and MAY consist of any number of newline separated paragraphs. +1. One or more footers MAY be provided one blank line after the body. Each footer MUST consist of + a word token, followed by either a `:` or `#` separator, followed by a string value (this is inspired by the + [git trailer convention](https://git-scm.com/docs/git-interpret-trailers)). +1. A footer's token MUST use `-` in place of whitespace characters, e.g., `Acked-by` (this helps differentiate + the footer section from a multi-paragraph body). An exception is made for `BREAKING CHANGE`, which MAY also be used as a token. +1. A footer's value MAY contain spaces and newlines, and parsing MUST terminate when the next valid footer + token/separator pair is observed. +1. Breaking changes MUST be indicated in the type/scope prefix of a commit, or as an entry in the + footer. +1. 
If included as a footer, a breaking change MUST consist of the uppercase text BREAKING CHANGE, followed by a colon, space, and description, e.g., +_BREAKING CHANGE: environment variables now take precedence over config files_. +1. If included in the type/scope prefix, breaking changes MUST be indicated by a + `!` immediately before the `:`. If `!` is used, `BREAKING CHANGE:` MAY be omitted from the footer section, + and the commit description SHALL be used to describe the breaking change. +1. Types other than `feat` and `fix` MAY be used in your commit messages, e.g., _docs: updated ref docs._ +1. The units of information that make up Conventional Commits MUST NOT be treated as case sensitive by implementors, with the exception of BREAKING CHANGE which MUST be uppercase. +1. BREAKING-CHANGE MUST be synonymous with BREAKING CHANGE, when used as a token in a footer. + +## Why Use Conventional Commits + +* Automatically generating CHANGELOGs. +* Automatically determining a semantic version bump (based on the types of commits landed). +* Communicating the nature of changes to teammates, the public, and other stakeholders. +* Triggering build and publish processes. +* Making it easier for people to contribute to your projects, by allowing them to explore + a more structured commit history. diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..581342161 --- /dev/null +++ b/.github/CODE_OF_CONDUCT.md @@ -0,0 +1,12 @@ +# Contributor Code of Conduct +As contributors and maintainers of this project, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. + +We are committed to making participation in this project a harassment-free experience for everyone, regardless of the level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, age, or religion. + +Examples of unacceptable behavior by participants include the use of sexual language or imagery, derogatory comments or personal attacks, trolling, public or private harassment, insults, or other unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. Project maintainers who do not follow the Code of Conduct may be removed from the project team. + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers. 
+ +This Code of Conduct is adapted from the Contributor Covenant, version 1.0.0, available at http://contributor-covenant.org/version/1/0/0/ diff --git a/ENVEXAMPLE b/ENVEXAMPLE index a387785f6..650c70279 100644 --- a/ENVEXAMPLE +++ b/ENVEXAMPLE @@ -9,14 +9,19 @@ DB_PORT=5432 # Port for PostgreSQL (default: 5432) ENCRYPTION_KEY=f4d5e6a7b8c9d0e1f23456789abcdef01234567890abcdef123456789abcdef0 # Key for encrypting sensitive data (passwords and proxies) MINIO_ENDPOINT=minio # MinIO endpoint in Docker MINIO_PORT=9000 # Port for MinIO (default: 9000) +MINIO_CONSOLE_PORT=9001 # Web UI Port for MinIO (default: 9001) MINIO_ACCESS_KEY=minio_access_key # MinIO access key MINIO_SECRET_KEY=minio_secret_key # MinIO secret key REDIS_HOST=redis # Redis host in Docker REDIS_PORT=6379 # Redis port (default: 6379) -# Backend URLs -BACKEND_URL=http://localhost:8080 # Internal URL for backend service -VITE_BACKEND_URL=http://localhost:8080 # URL used by frontend to connect to backend +# Backend and Frontend URLs and Ports +BACKEND_PORT=8080 # Port to run backend on. Needed for Docker setup +FRONTEND_PORT=5173 # Port to run frontend on. Needed for Docker setup +BACKEND_URL=http://localhost:8080 # URL on which the backend runs. You can change it based on your needs. +PUBLIC_URL=http://localhost:5173 # URL on which the frontend runs. You can change it based on your needs. +VITE_BACKEND_URL=http://localhost:8080 # URL used by frontend to connect to backend. It should always have the same value as BACKEND_URL +VITE_PUBLIC_URL=http://localhost:5173 # URL used by backend to connect to frontend. It should always have the same value as PUBLIC_URL # Optional Google OAuth settings for Google Sheet Integration GOOGLE_CLIENT_ID=your_google_client_id diff --git a/README.md b/README.md index dd5624220..ac74d21cd 100644 --- a/README.md +++ b/README.md @@ -29,14 +29,15 @@ Maxun lets you train a robot in 2 minutes and scrape the web on auto-pilot. Web -> Note: We are in early stages of development and do not support self hosting yet. You can run Maxun locally. +> Note: Maxun is in its early stages of development and currently does not support self-hosting. However, you can run Maxun locally. Self-hosting capabilities are planned for a future release and will be available soon. -# Local Setup +# Local Installation ### Docker Compose ``` git clone https://github.com/getmaxun/maxun -docker-compose up -d --build +docker-compose up -d ``` +You can access the frontend at http://localhost:5173/ and backend at http://localhost:8080/ ### Without Docker 1. Ensure you have Node.js, PostgreSQL, MinIO and Redis installed on your system. @@ -76,8 +77,12 @@ You can access the frontend at http://localhost:5173/ and backend at http://loca | Variable | Mandatory | Description | If Not Set | |-----------------------|-----------|----------------------------------------------------------------------------------------------|--------------------------------------------------------------| +| `BACKEND_PORT` | Yes | Port to run backend on. Needed for Docker setup | Default value: 8080 | +| `FRONTEND_PORT` | Yes | Port to run frontend on. Needed for Docker setup | Default value: 5173 | | `BACKEND_URL` | Yes | URL to run backend on. | Default value: http://localhost:8080 | | `VITE_BACKEND_URL` | Yes | URL used by frontend to connect to backend | Default value: http://localhost:8080 | +| `PUBLIC_URL` | Yes | URL to run frontend on. 
| Default value: http://localhost:5173 | +| `VITE_PUBLIC_URL` | Yes | URL used by backend to connect to frontend | Default value: http://localhost:5173 | | `JWT_SECRET` | Yes | Secret key used to sign and verify JSON Web Tokens (JWTs) for authentication. | JWT authentication will not work. | | `DB_NAME` | Yes | Name of the Postgres database to connect to. | Database connection will fail. | | `DB_USER` | Yes | Username for Postgres database authentication. | Database connection will fail. | @@ -87,6 +92,7 @@ You can access the frontend at http://localhost:5173/ and backend at http://loca | `ENCRYPTION_KEY` | Yes | Key used for encrypting sensitive data (proxies, passwords). | Encryption functionality will not work. | | `MINIO_ENDPOINT` | Yes | Endpoint URL for MinIO, to store Robot Run Screenshots. | Connection to MinIO storage will fail. | | `MINIO_PORT` | Yes | Port number for MinIO service. | Connection to MinIO storage will fail. | +| `MINIO_CONSOLE_PORT` | No | Port number for MinIO WebUI service. Needed for Docker setup. | Cannot access MinIO Web UI. | | `MINIO_ACCESS_KEY` | Yes | Access key for authenticating with MinIO. | MinIO authentication will fail. | | `GOOGLE_CLIENT_ID` | No | Client ID for Google OAuth, used for Google Sheet integration authentication. | Google login will not work. | | `GOOGLE_CLIENT_SECRET`| No | Client Secret for Google OAuth. | Google login will not work. | diff --git a/docker-compose.yml b/docker-compose.yml index 8b26973bc..92b69c14c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,7 +8,7 @@ services: POSTGRES_PASSWORD: ${DB_PASSWORD} POSTGRES_DB: ${DB_NAME} ports: - - "5432:5432" + - "${DB_PORT:-5432}:${DB_PORT:-5432}" volumes: - postgres_data:/var/lib/postgresql/data healthcheck: @@ -23,7 +23,7 @@ services: REDIS_HOST: ${REDIS_HOST} REDIS_PORT: ${REDIS_PORT} ports: - - "6379:6379" + - "${REDIS_PORT:-6379}:${REDIS_PORT:-6379}" volumes: - redis_data:/data @@ -32,10 +32,10 @@ services: environment: MINIO_ROOT_USER: ${MINIO_ACCESS_KEY} MINIO_ROOT_PASSWORD: ${MINIO_SECRET_KEY} - command: server /data --console-address :9001 + command: server /data --console-address :${MINIO_CONSOLE_PORT:-9001} ports: - - "9000:9000" # API port - - "9001:9001" # WebUI port + - "${MINIO_PORT:-9000}:${MINIO_PORT:-9000}" # API port + - "${MINIO_CONSOLE_PORT:-9001}:${MINIO_CONSOLE_PORT:-9001}" # WebUI port volumes: - minio_data:/data @@ -43,11 +43,12 @@ services: #build: #context: . #dockerfile: server/Dockerfile - image: getmaxun/maxun-backend:v0.0.2 + image: getmaxun/maxun-backend:v0.0.3 ports: - - "8080:8080" + - "${BACKEND_PORT:-8080}:${BACKEND_PORT:-8080}" env_file: .env environment: + BACKEND_URL: ${BACKEND_URL} # to ensure Playwright works in Docker PLAYWRIGHT_BROWSERS_PATH: /ms-playwright PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 0 @@ -56,9 +57,8 @@ services: CHROMIUM_FLAGS: '--disable-gpu --no-sandbox --headless=new' security_opt: - seccomp=unconfined # This might help with browser sandbox issues - # Increase shared memory size for Chromium - shm_size: '2gb' - mem_limit: 2g # Set a 2GB memory limit + shm_size: '2gb' # Increase shared memory size for Chromium + mem_limit: 2g # Set a 2GB memory limit depends_on: - postgres - redis @@ -72,13 +72,16 @@ services: #build: #context: . 
#dockerfile: Dockerfile - image: getmaxun/maxun-frontend:v0.0.1 + image: getmaxun/maxun-frontend:v0.0.2 ports: - - "5173:5173" + - "${FRONTEND_PORT:-5173}:${FRONTEND_PORT:-5173}" env_file: .env + environment: + PUBLIC_URL: ${PUBLIC_URL} + BACKEND_URL: ${BACKEND_URL} volumes: - ./:/app # Mount entire frontend app directory for hot reloading - - /app/node_modules # Anonymous volume to prevent overwriting node_modules + - /app/node_modules # Anonymous volume to prevent overwriting node_modules depends_on: - backend diff --git a/esbuild.config.js b/esbuild.config.js deleted file mode 100644 index e69de29bb..000000000 diff --git a/maxun-core/src/browserSide/scraper.js b/maxun-core/src/browserSide/scraper.js index 828a4f849..369a08be8 100644 --- a/maxun-core/src/browserSide/scraper.js +++ b/maxun-core/src/browserSide/scraper.js @@ -249,7 +249,7 @@ function scrapableHeuristics(maxCountPerPage = 50, minArea = 20000, scrolls = 3, } }, (key) => key // Use the original key in the output - )); + )) || []; } /** diff --git a/maxun-core/src/interpret.ts b/maxun-core/src/interpret.ts index ecef02dbb..a7a5de47e 100644 --- a/maxun-core/src/interpret.ts +++ b/maxun-core/src/interpret.ts @@ -16,6 +16,23 @@ import Concurrency from './utils/concurrency'; import Preprocessor from './preprocessor'; import log, { Level } from './utils/logger'; +/** + * Extending the Window interface for custom scraping functions. + */ +declare global { + interface Window { + scrape: (selector: string | null) => Record[]; + scrapeSchema: ( + schema: Record + ) => Record; + scrapeList: (config: { listSelector: string; fields: any; limit?: number; pagination: any }) => Record[]; + scrapeListAuto: (listSelector: string) => { selector: string; innerText: string }[]; + scrollDown: (pages?: number) => void; + scrollUp: (pages?: number) => void; + } +} + + /** * Defines optional intepreter options (passed in constructor) */ @@ -31,7 +48,6 @@ interface InterpreterOptions { }> } - /** * Class for running the Smart Workflows. 
*/ @@ -50,6 +66,8 @@ export default class Interpreter extends EventEmitter { private blocker: PlaywrightBlocker | null = null; + private cumulativeResults: Record[] = []; + constructor(workflow: WorkflowFile, options?: Partial) { super(); this.workflow = workflow.workflow; @@ -57,7 +75,9 @@ export default class Interpreter extends EventEmitter { this.options = { maxRepeats: 5, maxConcurrency: 5, - serializableCallback: (data) => { log(JSON.stringify(data), Level.WARN); }, + serializableCallback: (data) => { + log(JSON.stringify(data), Level.WARN); + }, binaryCallback: () => { log('Received binary data, thrashing them.', Level.WARN); }, debug: false, debugChannel: {}, @@ -214,11 +234,11 @@ export default class Interpreter extends EventEmitter { // every condition is treated as a single context switch (key as keyof typeof operators) { - case '$and': + case '$and' as keyof typeof operators: return array?.every((x) => this.applicable(x, context)); - case '$or': + case '$or' as keyof typeof operators: return array?.some((x) => this.applicable(x, context)); - case '$not': + case '$not' as keyof typeof operators: return !this.applicable(value, context); // $not should be a unary operator default: throw new Error('Undefined logic operator.'); @@ -233,9 +253,9 @@ export default class Interpreter extends EventEmitter { }; switch (key as keyof typeof meta) { - case '$before': + case '$before' as keyof typeof meta: return !usedActions.find(testRegexString); - case '$after': + case '$after' as keyof typeof meta: return !!usedActions.find(testRegexString); default: throw new Error('Undefined meta operator.'); @@ -308,9 +328,43 @@ export default class Interpreter extends EventEmitter { scrapeSchema: async (schema: Record) => { await this.ensureScriptsLoaded(page); - + const scrapeResult = await page.evaluate((schemaObj) => window.scrapeSchema(schemaObj), schema); - await this.options.serializableCallback(scrapeResult); + + const newResults = Array.isArray(scrapeResult) ? scrapeResult : [scrapeResult]; + newResults.forEach((result) => { + Object.entries(result).forEach(([key, value]) => { + const keyExists = this.cumulativeResults.some( + (item) => key in item && item[key] !== undefined + ); + + if (!keyExists) { + this.cumulativeResults.push({ [key]: value }); + } + }); + }); + + const mergedResult: Record[] = [ + Object.fromEntries( + Object.entries( + this.cumulativeResults.reduce((acc, curr) => { + Object.entries(curr).forEach(([key, value]) => { + // If the key doesn't exist or the current value is not undefined, add/update it + if (value !== undefined) { + acc[key] = value; + } + }); + return acc; + }, {}) + ) + ) + ]; + + // Log cumulative results after each action + console.log("CUMULATIVE results:", this.cumulativeResults); + console.log("MERGED results:", mergedResult); + + await this.options.serializableCallback(mergedResult); }, scrapeList: async (config: { listSelector: string, fields: any, limit?: number, pagination: any }) => { @@ -357,7 +411,7 @@ export default class Interpreter extends EventEmitter { }; for (const step of steps) { - this.log(`Launching ${step.action}`, Level.LOG); + this.log(`Launching ${String(step.action)}`, Level.LOG); if (step.action in wawActions) { // "Arrayifying" here should not be needed (TS + syntax checker - only arrays; but why not) @@ -365,7 +419,7 @@ export default class Interpreter extends EventEmitter { await wawActions[step.action as CustomFunctions](...(params ?? 
[])); } else { // Implements the dot notation for the "method name" in the workflow - const levels = step.action.split('.'); + const levels = String(step.action).split('.'); const methodName = levels[levels.length - 1]; let invokee: any = page; @@ -534,9 +588,14 @@ export default class Interpreter extends EventEmitter { if (this.options.debug) { this.log(`Current state is: \n${JSON.stringify(pageState, null, 2)}`, Level.WARN); } - const actionId = workflow.findIndex( - (step) => this.applicable(step.where, pageState, usedActions), - ); + + const actionId = workflow.findIndex((step) => { + const isApplicable = this.applicable(step.where, pageState, usedActions); + console.log(`Where:`, step.where); + console.log(`Page state:`, pageState); + console.log(`Match result: ${isApplicable}`); + return isApplicable; + }); const action = workflow[actionId]; diff --git a/server/src/api/record.ts b/server/src/api/record.ts index fef0be9af..5b33b12f6 100644 --- a/server/src/api/record.ts +++ b/server/src/api/record.ts @@ -621,7 +621,7 @@ async function executeRun(id: string) { }; } catch (error: any) { - logger.log('info', `Error while running a recording with id: ${id} - ${error.message}`); + logger.log('info', `Error while running a robot with id: ${id} - ${error.message}`); const run = await Run.findOne({ where: { runId: id } }); if (run) { await run.update({ @@ -660,7 +660,7 @@ export async function handleRunRecording(id: string, userId: string) { socket.on('ready-for-run', () => readyForRunHandler(browserId, newRunId)); - logger.log('info', `Running recording: ${id}`); + logger.log('info', `Running Robot: ${id}`); socket.on('disconnect', () => { cleanupSocketListeners(socket, browserId, newRunId); @@ -670,7 +670,7 @@ export async function handleRunRecording(id: string, userId: string) { return newRunId; } catch (error: any) { - logger.error('Error running recording:', error); + logger.error('Error running robot:', error); } } diff --git a/server/src/browser-management/classes/RemoteBrowser.ts b/server/src/browser-management/classes/RemoteBrowser.ts index 07ea8780b..769787da7 100644 --- a/server/src/browser-management/classes/RemoteBrowser.ts +++ b/server/src/browser-management/classes/RemoteBrowser.ts @@ -165,6 +165,13 @@ export class RemoteBrowser { contextOptions.userAgent = browserUserAgent; this.context = await this.browser.newContext(contextOptions); this.currentPage = await this.context.newPage(); + + this.currentPage.on('framenavigated', (frame) => { + if (frame === this.currentPage?.mainFrame()) { + this.socket.emit('urlChanged', this.currentPage.url()); + } + }); + // await this.currentPage.setExtraHTTPHeaders({ // 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3' // }); @@ -243,8 +250,8 @@ export class RemoteBrowser { return; } await this.client.send('Page.screencastFrameAck', { sessionId: sessionId }); - } catch (e) { - logger.log('error', e); + } catch (e: any) { + logger.log('error', `Screencast error: ${e}`); } }, 100); }); @@ -278,7 +285,7 @@ export class RemoteBrowser { } } catch (e) { const { message } = e as Error; - logger.log('error', message); + logger.log('error', `Screenshot error: ${message}`); } }; @@ -362,6 +369,13 @@ export class RemoteBrowser { if (page) { await this.stopScreencast(); this.currentPage = page; + + this.currentPage.on('framenavigated', (frame) => { + if (frame === this.currentPage?.mainFrame()) { + this.socket.emit('urlChanged', this.currentPage.url()); + } + }); + //await 
this.currentPage.setViewportSize({ height: 400, width: 900 }) this.client = await this.currentPage.context().newCDPSession(this.currentPage); this.socket.emit('urlChanged', this.currentPage.url()); @@ -388,9 +402,14 @@ export class RemoteBrowser { await this.currentPage?.close(); this.currentPage = newPage; if (this.currentPage) { - this.currentPage.on('load', (page) => { - this.socket.emit('urlChanged', page.url()); - }) + this.currentPage.on('framenavigated', (frame) => { + if (frame === this.currentPage?.mainFrame()) { + this.socket.emit('urlChanged', this.currentPage.url()); + } + }); + // this.currentPage.on('load', (page) => { + // this.socket.emit('urlChanged', page.url()); + // }) this.client = await this.currentPage.context().newCDPSession(this.currentPage); await this.subscribeToScreencast(); } else { diff --git a/server/src/constants/config.ts b/server/src/constants/config.ts index 74d9de4cb..1943fbe4a 100644 --- a/server/src/constants/config.ts +++ b/server/src/constants/config.ts @@ -1,4 +1,4 @@ -export const SERVER_PORT = process.env.SERVER_PORT ? Number(process.env.SERVER_PORT) : 8080 +export const SERVER_PORT = process.env.BACKEND_PORT ? Number(process.env.BACKEND_PORT) : 8080 export const DEBUG = process.env.DEBUG === 'true' export const LOGS_PATH = process.env.LOGS_PATH ?? 'server/logs' export const ANALYTICS_ID = 'oss' \ No newline at end of file diff --git a/server/src/routes/auth.ts b/server/src/routes/auth.ts index 692add99e..cc3d879bd 100644 --- a/server/src/routes/auth.ts +++ b/server/src/routes/auth.ts @@ -384,7 +384,7 @@ router.get( httpOnly: false, maxAge: 60000, }); - res.redirect(`http://localhost:5173`); + res.redirect(process.env.PUBLIC_URL as string || "http://localhost:5173"); } catch (error: any) { res.status(500).json({ message: `Google OAuth error: ${error.message}` }); } diff --git a/server/src/routes/storage.ts b/server/src/routes/storage.ts index f84583d0b..d1f648f86 100644 --- a/server/src/routes/storage.ts +++ b/server/src/routes/storage.ts @@ -38,7 +38,7 @@ router.get('/recordings', requireSignIn, async (req, res) => { const data = await Robot.findAll(); return res.send(data); } catch (e) { - logger.log('info', 'Error while reading recordings'); + logger.log('info', 'Error while reading robots'); return res.send(null); } }); @@ -55,7 +55,7 @@ router.get('/recordings/:id', requireSignIn, async (req, res) => { ); return res.send(data); } catch (e) { - logger.log('info', 'Error while reading recordings'); + logger.log('info', 'Error while reading robots'); return res.send(null); } }) @@ -400,7 +400,7 @@ router.put('/runs/:id', requireSignIn, async (req: AuthenticatedRequest, res) => }); } catch (e) { const { message } = e as Error; - logger.log('info', `Error while creating a run with recording id: ${req.params.id} - ${message}`); + logger.log('info', `Error while creating a run with robot id: ${req.params.id} - ${message}`); return res.send(''); } }); @@ -518,7 +518,7 @@ router.post('/runs/run/:id', requireSignIn, async (req: AuthenticatedRequest, re finishedAt: new Date().toLocaleString(), }); } - logger.log('info', `Error while running a recording with id: ${req.params.id} - ${message}`); + logger.log('info', `Error while running a robot with id: ${req.params.id} - ${message}`); capture( 'maxun-oss-run-created-manual', { @@ -757,7 +757,7 @@ router.post('/runs/abort/:id', requireSignIn, async (req, res) => { return res.send(true); } catch (e) { const { message } = e as Error; - logger.log('info', `Error while running a recording with name: 
${req.params.fileName}_${req.params.runId}.json`); + logger.log('info', `Error while running a robot with name: ${req.params.fileName}_${req.params.runId}.json`); return res.send(false); } }); \ No newline at end of file diff --git a/server/src/server.ts b/server/src/server.ts index e6fee5f29..8c28c2d26 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -21,7 +21,7 @@ import swaggerSpec from './swagger/config'; const app = express(); app.use(cors({ - origin: 'http://localhost:5173', + origin: process.env.PUBLIC_URL ? process.env.PUBLIC_URL : 'http://localhost:5173', credentials: true, })); app.use(express.json()); @@ -92,9 +92,10 @@ app.get('/', function (req, res) { // Add CORS headers app.use((req, res, next) => { - res.header('Access-Control-Allow-Origin', '*'); + res.header('Access-Control-Allow-Origin', process.env.PUBLIC_URL || 'http://localhost:5173'); res.header('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS'); res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization'); + res.header('Access-Control-Allow-Credentials', 'true'); if (req.method === 'OPTIONS') { return res.sendStatus(200); } diff --git a/server/src/workflow-management/classes/Generator.ts b/server/src/workflow-management/classes/Generator.ts index 7801a20ec..cfef4a309 100644 --- a/server/src/workflow-management/classes/Generator.ts +++ b/server/src/workflow-management/classes/Generator.ts @@ -22,6 +22,7 @@ import { getBestSelectorForAction } from "../utils"; import { browserPool } from "../../server"; import { uuid } from "uuidv4"; import { capture } from "../../utils/analytics" +import { encrypt } from "../../utils/auth"; interface PersistedGeneratedData { lastUsedSelector: string; @@ -159,6 +160,55 @@ export class WorkflowGenerator { }) }; + /** + * New function to handle actionable check for scrapeList + * @param page The current Playwright Page object. + * @param config The scrapeList configuration object. + * @returns {Promise} Array of actionable selectors. + */ + private async getSelectorsForScrapeList(page: Page, config: { + listSelector: string; + fields: any; + limit?: number; + pagination: any; + }): Promise { + const { listSelector } = config; + + // Verify if the selectors are present and actionable on the current page + const actionableSelectors: string[] = []; + if (listSelector) { + const isActionable = await page.isVisible(listSelector).catch(() => false); + if (isActionable) { + actionableSelectors.push(listSelector); + logger.log('debug', `List selector ${listSelector} is actionable.`); + } else { + logger.log('warn', `List selector ${listSelector} is not visible on the page.`); + } + } + + return actionableSelectors; + } + + /** + * New function to handle actionable check for scrapeList + * @param page The current Playwright Page object. + * @param schema The scrapeSchema configuration object. + * @returns {Promise} Array of actionable selectors. 
+ */ + private async getSelectorsForSchema(page: Page, schema: Record): Promise { + const selectors = Object.values(schema).map((field) => field.selector); + + // Verify if the selectors are present and actionable on the current page + const actionableSelectors: string[] = []; + for (const selector of selectors) { + const isActionable = await page.isVisible(selector).catch(() => false); + if (isActionable) { + actionableSelectors.push(selector); + } + } + return actionableSelectors; + } + /** * Adds a newly generated pair to the workflow and notifies the client about it by * sending the updated workflow through socket. @@ -184,55 +234,67 @@ export class WorkflowGenerator { */ private addPairToWorkflowAndNotifyClient = async (pair: WhereWhatPair, page: Page) => { let matched = false; - // validate if a pair with the same where conditions is already present in the workflow + + // Check for scrapeSchema actions and enhance the where condition + if (pair.what[0].action === 'scrapeSchema') { + const schema = pair.what[0]?.args?.[0]; + if (schema) { + const additionalSelectors = await this.getSelectorsForSchema(page, schema); + pair.where.selectors = [...(pair.where.selectors || []), ...additionalSelectors]; + } + } + + if (pair.what[0].action === 'scrapeList') { + const config = pair.what[0]?.args?.[0]; + if (config) { + const actionableSelectors = await this.getSelectorsForScrapeList(page, config); + pair.where.selectors = [...(pair.where.selectors || []), ...actionableSelectors]; + } + } + + // Validate if the pair is already in the workflow if (pair.where.selectors && pair.where.selectors[0]) { const match = selectorAlreadyInWorkflow(pair.where.selectors[0], this.workflowRecord.workflow); if (match) { - // if a match of where conditions is found, the new action is added into the matched rule const matchedIndex = this.workflowRecord.workflow.indexOf(match); if (pair.what[0].action !== 'waitForLoadState' && pair.what[0].action !== 'press') { pair.what.push({ action: 'waitForLoadState', args: ['networkidle'], - }) + }); } this.workflowRecord.workflow[matchedIndex].what = this.workflowRecord.workflow[matchedIndex].what.concat(pair.what); - logger.log('info', `Pushed ${JSON.stringify(this.workflowRecord.workflow[matchedIndex])} to workflow pair`); matched = true; } } - // is the where conditions of the pair are not already in the workflow, we need to validate the where conditions - // for possible overshadowing of different rules and handle cases according to the recording logic + + // Handle cases where the where condition isn't already present if (!matched) { const handled = await this.handleOverShadowing(pair, page, this.generatedData.lastIndex || 0); if (!handled) { - //adding waitForLoadState with networkidle, for better success rate of automatically recorded workflows if (pair.what[0].action !== 'waitForLoadState' && pair.what[0].action !== 'press') { pair.what.push({ action: 'waitForLoadState', args: ['networkidle'], - }) + }); } if (this.generatedData.lastIndex === 0) { this.generatedData.lastIndex = null; - // we want to have the most specific selectors at the beginning of the workflow this.workflowRecord.workflow.unshift(pair); } else { this.workflowRecord.workflow.splice(this.generatedData.lastIndex || 0, 0, pair); if (this.generatedData.lastIndex) { - this.generatedData.lastIndex = this.generatedData.lastIndex - 1; + this.generatedData.lastIndex -= 1; } } - logger.log('info', - `${JSON.stringify(pair)}: Added to workflow file on index: ${this.generatedData.lastIndex || 0}`); - } else 
{ - logger.log('debug', - ` ${JSON.stringify(this.workflowRecord.workflow[this.generatedData.lastIndex || 0])} added action to workflow pair`); } } + + // Emit the updated workflow to the client this.socket.emit('workflow', this.workflowRecord); logger.log('info', `Workflow emitted`); }; + /** * Generates a pair for the click event. @@ -300,7 +362,7 @@ export class WorkflowGenerator { where, what: [{ action: 'press', - args: [selector, key], + args: [selector, encrypt(key)], }], } if (selector) { @@ -797,7 +859,7 @@ export class WorkflowGenerator { // when more than one press action is present, add a type action pair.what.splice(index - input.actionCounter, input.actionCounter, { action: 'type', - args: [input.selector, input.value], + args: [input.selector, encrypt(input.value)], }, { action: 'waitForLoadState', args: ['networkidle'], diff --git a/server/src/workflow-management/classes/Interpreter.ts b/server/src/workflow-management/classes/Interpreter.ts index fa5e9332b..d53259b7d 100644 --- a/server/src/workflow-management/classes/Interpreter.ts +++ b/server/src/workflow-management/classes/Interpreter.ts @@ -3,6 +3,38 @@ import logger from "../../logger"; import { Socket } from "socket.io"; import { Page } from "playwright"; import { InterpreterSettings } from "../../types"; +import { decrypt } from "../../utils/auth"; + +/** + * Decrypts any encrypted inputs in the workflow. + * @param workflow The workflow to decrypt. + */ +function decryptWorkflow(workflow: WorkflowFile): WorkflowFile { + const decryptedWorkflow = JSON.parse(JSON.stringify(workflow)) as WorkflowFile; + + decryptedWorkflow.workflow.forEach((pair) => { + pair.what.forEach((action) => { + if ((action.action === 'type' || action.action === 'press') && Array.isArray(action.args) && action.args.length > 1) { + try { + const encryptedValue = action.args[1]; + if (typeof encryptedValue === 'string') { + const decryptedValue = decrypt(encryptedValue); + action.args[1] = decryptedValue; + } else { + logger.log('error', 'Encrypted value is not a string'); + action.args[1] = ''; + } + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : String(error); + logger.log('error', `Failed to decrypt input value: ${errorMessage}`); + action.args[1] = ''; + } + } + }); + }); + + return decryptedWorkflow; +} /** * This class implements the main interpretation functions. @@ -123,6 +155,9 @@ export class WorkflowInterpreter { ) => { const params = settings.params ? settings.params : null; delete settings.params; + + const decryptedWorkflow = decryptWorkflow(workflow); + const options = { ...settings, debugChannel: { @@ -143,7 +178,7 @@ export class WorkflowInterpreter { } } - const interpreter = new Interpreter(workflow, options); + const interpreter = new Interpreter(decryptedWorkflow, options); this.interpreter = interpreter; interpreter.on('flag', async (page, resume) => { @@ -212,6 +247,9 @@ export class WorkflowInterpreter { public InterpretRecording = async (workflow: WorkflowFile, page: Page, settings: InterpreterSettings) => { const params = settings.params ? 
settings.params : null; delete settings.params; + + const decryptedWorkflow = decryptWorkflow(workflow); + const options = { ...settings, debugChannel: { @@ -234,15 +272,19 @@ export class WorkflowInterpreter { } } - const interpreter = new Interpreter(workflow, options); + const interpreter = new Interpreter(decryptedWorkflow, options); this.interpreter = interpreter; const status = await interpreter.run(page, params); + const lastArray = this.serializableData.length > 1 + ? [this.serializableData[this.serializableData.length - 1]] + : this.serializableData; + const result = { log: this.debugMessages, result: status, - serializableOutput: this.serializableData.reduce((reducedObject, item, index) => { + serializableOutput: lastArray.reduce((reducedObject, item, index) => { return { [`item-${index}`]: item, ...reducedObject, diff --git a/server/src/workflow-management/scheduler/index.ts b/server/src/workflow-management/scheduler/index.ts index 082fcf2ed..02ca905fc 100644 --- a/server/src/workflow-management/scheduler/index.ts +++ b/server/src/workflow-management/scheduler/index.ts @@ -171,7 +171,7 @@ async function executeRun(id: string) { processGoogleSheetUpdates(); return true; } catch (error: any) { - logger.log('info', `Error while running a recording with id: ${id} - ${error.message}`); + logger.log('info', `Error while running a robot with id: ${id} - ${error.message}`); console.log(error.message); const run = await Run.findOne({ where: { runId: id } }); if (run) { @@ -232,7 +232,7 @@ export async function handleRunRecording(id: string, userId: string) { socket.on('ready-for-run', () => readyForRunHandler(browserId, newRunId)); - logger.log('info', `Running recording: ${id}`); + logger.log('info', `Running robot: ${id}`); socket.on('disconnect', () => { cleanupSocketListeners(socket, browserId, newRunId); diff --git a/src/components/molecules/InterpretationButtons.tsx b/src/components/molecules/InterpretationButtons.tsx index 0723bac2f..9d9837611 100644 --- a/src/components/molecules/InterpretationButtons.tsx +++ b/src/components/molecules/InterpretationButtons.tsx @@ -1,4 +1,4 @@ -import { Box, Button, Stack, Typography } from "@mui/material"; +import { Box, Button, Stack, Typography, CircularProgress } from "@mui/material"; import { PlayCircle } from "@mui/icons-material"; import React, { useCallback, useEffect, useState } from "react"; import { interpretCurrentRecording, stopCurrentInterpretation } from "../../api/recording"; @@ -105,9 +105,9 @@ export const InterpretationButtons = ({ enableStepping }: InterpretationButtonsP const finished = await interpretCurrentRecording(); setInfo({ ...info, running: false }); if (finished) { - notify('info', 'Interpretation finished'); + notify('info', 'Run finished'); } else { - notify('error', 'Interpretation failed to start'); + notify('error', 'Run failed to start'); } } }; @@ -139,7 +139,9 @@ export const InterpretationButtons = ({ enableStepping }: InterpretationButtonsP disabled={info.running} sx={{ display: 'grid' }} > - {info.running ? 'Extracting data...please wait' : 'Get Preview of Output Data'} + {info.running ? + Extracting data...please wait for 10secs to 1min + : 'Get Preview of Output Data'} { }} diff --git a/src/components/molecules/NavBar.tsx b/src/components/molecules/NavBar.tsx index 4c0b7296a..ee8c80e8c 100644 --- a/src/components/molecules/NavBar.tsx +++ b/src/components/molecules/NavBar.tsx @@ -58,7 +58,6 @@ export const NavBar: React.FC = ({ recordingName, isRecording }) => }}>
Maxun
- { user ? ( diff --git a/src/components/molecules/RecordingsTable.tsx b/src/components/molecules/RecordingsTable.tsx index e9f0aebc2..651d3677f 100644 --- a/src/components/molecules/RecordingsTable.tsx +++ b/src/components/molecules/RecordingsTable.tsx @@ -151,9 +151,6 @@ export const RecordingsTable = ({ handleEditRecording, handleRunRecording, handl row.name.toLowerCase().includes(searchTerm.toLowerCase()) ); - - - return ( @@ -249,25 +246,25 @@ export const RecordingsTable = ({ handleEditRecording, handleRunRecording, handl handleEditRobot(row.id, row.name, row.params || [])} + handleDuplicate={() => { + handleDuplicateRobot(row.id, row.name, row.params || []); + }} handleDelete={() => { checkRunsForRecording(row.id).then((result: boolean) => { if (result) { - notify('warning', 'Cannot delete recording as it has active runs'); + notify('warning', 'Cannot delete robot as it has associated runs'); } }) deleteRecordingFromStorage(row.id).then((result: boolean) => { if (result) { setRows([]); - notify('success', 'Recording deleted successfully'); + notify('success', 'Robot deleted successfully'); fetchRecordings(); } }) }} - handleDuplicate={() => { - handleDuplicateRobot(row.id, row.name, row.params || []); - }} /> ); @@ -420,18 +417,18 @@ const OptionsButton = ({ handleEdit, handleDelete, handleDuplicate }: OptionsBut Edit - { handleDelete(); handleClose(); }}> - - - - Delete - { handleDuplicate(); handleClose(); }}> Duplicate + { handleDelete(); handleClose(); }}> + + + + Delete + ); diff --git a/src/components/molecules/RobotEdit.tsx b/src/components/molecules/RobotEdit.tsx index 74b50f626..9441ecefa 100644 --- a/src/components/molecules/RobotEdit.tsx +++ b/src/components/molecules/RobotEdit.tsx @@ -155,9 +155,13 @@ export const RobotEditModal = ({ isOpen, handleStart, handleClose, initialSettin label="Robot Limit" type="number" value={robot.recording.workflow[0].what[0].args[0].limit || ''} - onChange={(e) => - handleLimitChange(parseInt(e.target.value, 10) || 0) - } + onChange={(e) =>{ + const value = parseInt(e.target.value, 10); + if (value >= 1) { + handleLimitChange(value); + } + }} + inputProps={{ min: 1 }} style={{ marginBottom: '20px' }} /> )} diff --git a/src/components/molecules/SaveRecording.tsx b/src/components/molecules/SaveRecording.tsx index 60ef3fa68..cfebc867b 100644 --- a/src/components/molecules/SaveRecording.tsx +++ b/src/components/molecules/SaveRecording.tsx @@ -46,7 +46,7 @@ export const SaveRecording = ({ fileName }: SaveRecordingProps) => { }; const exitRecording = useCallback(async () => { - notify('success', 'Recording saved successfully'); + notify('success', 'Robot saved successfully'); if (browserId) { await stopRecording(browserId); } diff --git a/src/components/organisms/ApiKey.tsx b/src/components/organisms/ApiKey.tsx index 675edb726..e6a00a914 100644 --- a/src/components/organisms/ApiKey.tsx +++ b/src/components/organisms/ApiKey.tsx @@ -36,9 +36,9 @@ const ApiKeyManager = () => { const [copySuccess, setCopySuccess] = useState(false); const { notify } = useGlobalInfoStore(); - - + + useEffect(() => { const fetchApiKey = async () => { @@ -53,7 +53,7 @@ const ApiKeyManager = () => { }; fetchApiKey(); - + }, []); const generateApiKey = async () => { @@ -61,7 +61,7 @@ const ApiKeyManager = () => { try { const { data } = await axios.post(`${apiUrl}/auth/generate-api-key`); setApiKey(data.api_key); - + notify('success', `Generated API Key successfully`); } catch (error: any) { notify('error', `Failed to generate API Key - ${error.message}`); @@ -88,11 +88,25 
@@ const ApiKeyManager = () => { navigator.clipboard.writeText(apiKey); setCopySuccess(true); setTimeout(() => setCopySuccess(false), 2000); - notify('info', 'Copied to clipboard'); + notify('info', 'Copied API Key successfully'); } }; - if (loading) return ; + if (loading) { + return ( + + + + ); + } return ( diff --git a/src/components/organisms/RightSidePanel.tsx b/src/components/organisms/RightSidePanel.tsx index a11989bd8..4aaf7b214 100644 --- a/src/components/organisms/RightSidePanel.tsx +++ b/src/components/organisms/RightSidePanel.tsx @@ -54,6 +54,7 @@ export const RightSidePanel: React.FC = ({ onFinishCapture const [showCaptureScreenshot, setShowCaptureScreenshot] = useState(true); const [showCaptureText, setShowCaptureText] = useState(true); const [hoverStates, setHoverStates] = useState<{ [id: string]: boolean }>({}); + const [browserStepIdList, setBrowserStepIdList] = useState([]); const { lastAction, notify, currentWorkflowActionsState, setCurrentWorkflowActionsState } = useGlobalInfoStore(); const { getText, startGetText, stopGetText, getScreenshot, startGetScreenshot, stopGetScreenshot, getList, startGetList, stopGetList, startPaginationMode, stopPaginationMode, paginationType, updatePaginationType, limitType, customLimit, updateLimitType, updateCustomLimit, stopLimitMode, startLimitMode, captureStage, setCaptureStage } = useActionContext(); @@ -195,12 +196,18 @@ export const RightSidePanel: React.FC = ({ onFinishCapture const getTextSettingsObject = useCallback(() => { const settings: Record = {}; browserSteps.forEach(step => { + if (browserStepIdList.includes(step.id)) { + return; + } + if (step.type === 'text' && step.label && step.selectorObj?.selector) { settings[step.label] = step.selectorObj; } + setBrowserStepIdList(prevList => [...prevList, step.id]); }); + return settings; - }, [browserSteps]); + }, [browserSteps, browserStepIdList]); const stopCaptureAndEmitGetTextSettings = useCallback(() => { @@ -211,6 +218,7 @@ export const RightSidePanel: React.FC = ({ onFinishCapture } stopGetText(); const settings = getTextSettingsObject(); + console.log("SETTINGS", settings); const hasTextSteps = browserSteps.some(step => step.type === 'text'); if (hasTextSteps) { socket?.emit('action', { action: 'scrapeSchema', settings }); diff --git a/src/pages/MainPage.tsx b/src/pages/MainPage.tsx index 6ce7efe88..8af3d3c5e 100644 --- a/src/pages/MainPage.tsx +++ b/src/pages/MainPage.tsx @@ -49,10 +49,10 @@ export const MainPage = ({ handleEditRecording }: MainPageProps) => { aborted = true; notifyAboutAbort(runId).then(async (response) => { if (response) { - notify('success', `Interpretation of ${runningRecordingName} aborted successfully`); + notify('success', `Interpretation of robot ${runningRecordingName} aborted successfully`); await stopRecording(ids.browserId); } else { - notify('error', `Failed to abort the interpretation ${runningRecordingName} recording`); + notify('error', `Failed to abort the interpretation of ${runningRecordingName} robot`); } }) } @@ -67,9 +67,9 @@ export const MainPage = ({ handleEditRecording }: MainPageProps) => { interpretStoredRecording(runId).then(async (interpretation: boolean) => { if (!aborted) { if (interpretation) { - notify('success', `Interpretation of ${runningRecordingName} succeeded`); + notify('success', `Interpretation of robot ${runningRecordingName} succeeded`); } else { - notify('success', `Failed to interpret ${runningRecordingName} recording`); + notify('success', `Failed to interpret ${runningRecordingName} robot`); // destroy the 
created browser await stopRecording(browserId); } @@ -98,9 +98,9 @@ export const MainPage = ({ handleEditRecording }: MainPageProps) => { socket.on('debugMessage', debugMessageHandler); setContent('runs'); if (browserId) { - notify('info', `Running recording: ${runningRecordingName}`); + notify('info', `Running robot: ${runningRecordingName}`); } else { - notify('error', `Failed to run recording: ${runningRecordingName}`); + notify('error', `Failed to run robot: ${runningRecordingName}`); } }) return (socket: Socket, browserId: string, runId: string) => { @@ -113,9 +113,9 @@ export const MainPage = ({ handleEditRecording }: MainPageProps) => { scheduleStoredRecording(runningRecordingId, settings) .then(({ message, runId }: ScheduleRunResponse) => { if (message === 'success') { - notify('success', `Recording ${runningRecordingName} scheduled successfully`); + notify('success', `Robot ${runningRecordingName} scheduled successfully`); } else { - notify('error', `Failed to schedule recording ${runningRecordingName}`); + notify('error', `Failed to schedule robot ${runningRecordingName}`); } }); } diff --git a/src/pages/Register.tsx b/src/pages/Register.tsx index b2a3eebf6..c64de4aed 100644 --- a/src/pages/Register.tsx +++ b/src/pages/Register.tsx @@ -44,7 +44,7 @@ const Register = () => { window.localStorage.setItem("user", JSON.stringify(data)); navigate("/"); } catch (error:any) { - notify("error", error.response.data || "Registration Failed. Please try again."); + notify("error", `Registration Failed. Please try again. ${error.response.data}`); setLoading(false); } }; diff --git a/vite.config.js b/vite.config.js index 59f495a1e..9ca574ebb 100644 --- a/vite.config.js +++ b/vite.config.js @@ -1,11 +1,20 @@ import { defineConfig } from 'vite'; import react from '@vitejs/plugin-react'; +import dotenv from 'dotenv'; +dotenv.config(); export default defineConfig(() => { + const publicUrl = process.env.VITE_PUBLIC_URL || 'http://localhost:5173'; + return { define: { 'import.meta.env.VITE_BACKEND_URL': JSON.stringify(process.env.VITE_BACKEND_URL), + 'import.meta.env.VITE_PUBLIC_URL': JSON.stringify(publicUrl), }, + server: { + host: new URL(publicUrl).hostname, + port: parseInt(new URL(publicUrl).port), + }, build: { outDir: 'build', manifest: true,
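
Taken together, the docker-compose.yml, ENVEXAMPLE, README, and vite.config.js changes above move every service port and URL into `.env`. A minimal usage sketch under that assumption: the variable names come from the updated ENVEXAMPLE, and the values shown are only the documented defaults, not the only valid choices.

```
# .env (illustrative values mirroring the updated ENVEXAMPLE defaults)
BACKEND_PORT=8080
FRONTEND_PORT=5173
BACKEND_URL=http://localhost:8080
PUBLIC_URL=http://localhost:5173
VITE_BACKEND_URL=http://localhost:8080
VITE_PUBLIC_URL=http://localhost:5173
MINIO_CONSOLE_PORT=9001

# bring the stack up with the parameterized compose file
docker-compose up -d
```

If a port is changed here, the matching URL variables (for example `BACKEND_URL` and `VITE_BACKEND_URL`) should be kept pointing at the same port, since the compose file, the backend CORS settings, and the Vite config all derive their values from these variables.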