added resume functionality
parent 78e2906c5e
commit 0d924aa300
@@ -21,7 +21,7 @@ const utils = require("./utils");
const config = JSON.parse(fs.readFileSync(__dirname + "/config.json"));
const environment = process.env["ENVIRONMENT"];
const cryptoConfig = utils.getCryptoConfig();
let rates = [];
const state = JSON.parse(fs.readFileSync(__dirname + "/state.json"));

const email = utils.decryptString(
  process.env["temu-email"],
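Reading state.json synchronously at startup means a missing or corrupted file throws before the crawler even launches. A minimal defensive sketch, assuming the fallback shape should mirror the state.json file added in this commit (the loadState name is illustrative, not part of the change):

const loadState = () => {
  try {
    // reuse the same on-disk location the commit writes to
    return JSON.parse(fs.readFileSync(path.join(__dirname, "state.json")));
  } catch (e) {
    // treat an absent or unreadable file as a fresh run starting at page 1
    return { last_updated: null, last_page: 1 };
  }
};

Calling loadState() instead of the bare readFileSync would keep the very first run working before state.json exists.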
@@ -119,7 +119,7 @@ const utils = require("./utils");
const pagination = 10;
let total_items = 0;
let currentPage = 1;
let maxPage = 200;
let maxPage = 8;

/**
 * Capture response
@@ -164,6 +164,24 @@ const utils = require("./utils");
  `processed/`
);

/*
 * update state
 */
const updateState = async (pageNumber, date) => {
  try {
    // update state
    const stateJson = {
      last_updated: date,
      last_page: pageNumber,
    };

    // File path to save the JSON
    const filePath = path.join(__dirname, "state.json");
    fs.writeFileSync(filePath, JSON.stringify(stateJson, null, 2));
  } catch (e) {
    console.log(e);
  }
};

/*
 * download labels
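One caveat with writing the checkpoint via fs.writeFileSync: if the process dies mid-write, state.json can be left truncated and the next run will fail to parse it. A common pattern is to write to a temporary file and rename it into place, since rename is atomic on the same filesystem. A sketch only, not what this commit does (updateStateAtomic and the .tmp suffix are illustrative):

const updateStateAtomic = (pageNumber, date) => {
  const filePath = path.join(__dirname, "state.json");
  const tmpPath = filePath + ".tmp";
  // write the full payload to a scratch file first
  fs.writeFileSync(tmpPath, JSON.stringify({ last_updated: date, last_page: pageNumber }, null, 2));
  // then swap it in, so readers never see a half-written file
  fs.renameSync(tmpPath, filePath);
};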
@@ -271,8 +289,6 @@ const utils = require("./utils");
        console.log("No file downloaded.");
      }
      bIndex++;
      // extract the url and save in s3
      // await s3Utils.downloadAndUploadToS3(labelUrl, poNumber);
    }
  } catch (e) {
    console.log(e);
@@ -283,7 +299,6 @@ const utils = require("./utils");
  }
};


try {
  // get total items
  await page
@@ -303,9 +318,50 @@ const utils = require("./utils");

  let total_pages = Math.ceil(total_items / pagination);
  console.log(`Total Pages count : ${total_pages}`);
  // crawl next pages

  while (true) {
    try {
      // crawl next pages
      const currentStatePage = JSON.parse(fs.readFileSync(__dirname + "/state.json")).last_page;
      if (currentStatePage > currentPage) {
        console.log(`Moving from ${currentPage} to ${currentStatePage}`);
        const hasNextBtn = await page.evaluate(() => {
          const liElement = document.querySelector(
            "li.PGT_next_123.PGT_disabled_123"
          );
          return liElement == null;
        });

        // break if there is no next button
        if (!hasNextBtn) {
          console.log("No next button");
          break;
        }

        if (currentPage > maxPage || currentPage > total_pages) {
          console.log("Last Page Reached");
          break;
        }

        // go to next page
        if (hasNextBtn) {
          await page.evaluate(async () => {
            const liElement = document.querySelector("li.PGT_next_123");
            await new Promise((r) => setTimeout(r, 3000));
            if (liElement) {
              liElement.click();
            }
          });
        }

        currentPage++;
        // wait
        continue;
      }

      // update state
      await updateState(currentPage, luxon.DateTime.now().toISO());

      console.log(`Crawling for page ${currentPage}`);

      await utils.tryTemuLogin(page, email, password, loginPage);
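The fast-forward branch above clicks the pager from inside page.evaluate with a fixed 3-second sleep. The same step can be expressed with Puppeteer element handles instead; a sketch under the assumption that the enabled control is the li.PGT_next_123 element without the PGT_disabled_123 class (goToNextPage is an illustrative name; the pause mirrors the one used in the commit):

const goToNextPage = async (page) => {
  // the disabled class marks the last page, so select only an enabled "next" control
  const next = await page.$("li.PGT_next_123:not(.PGT_disabled_123)");
  if (!next) return false;
  await next.click();
  // give the order grid time to re-render, matching the fixed pause used above
  await new Promise((r) => setTimeout(r, 3000));
  return true;
};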
@@ -360,6 +416,9 @@ const utils = require("./utils");
    console.log(e);
  }

  // update state
  await updateState(1, luxon.DateTime.now().toISO());

  console.log(
    `==========< ENDED --- DOWNLOAD SHIPPING LABELS ${luxon.DateTime.now()} >==========`
  );
@@ -0,0 +1,4 @@
{
  "last_updated": "2025-01-27T13:48:11.009+05:00",
  "last_page": 1
}