
Commit 55588d1

feat(api): pass tags parameter to a web scraper
1 parent: f938249

3 files changed: +17 −1 lines changed

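The commit threads a tracker's tags through to the content request that is sent to the web scraper. A minimal, illustrative sketch of that data flow, using simplified stand-in types (the crate's real Tracker and WebScraperContentRequest carry many more fields than shown here):

// Illustrative sketch only: simplified stand-ins for the crate's types,
// showing how the tracker's tags are borrowed into the scraper request.
struct Tracker {
    tags: Vec<String>,
}

struct WebScraperContentRequest<'a> {
    // New in this commit: tags are forwarded alongside the extractor script.
    tags: &'a Vec<String>,
}

fn build_request(tracker: &Tracker) -> WebScraperContentRequest<'_> {
    WebScraperContentRequest {
        tags: &tracker.tags,
    }
}

fn main() {
    let tracker = Tracker {
        tags: vec!["tag:1".to_string(), "tag:common".to_string()],
    };
    let request = build_request(&tracker);
    // The request borrows the tags rather than cloning them.
    assert_eq!(request.tags, &tracker.tags);
}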

src/trackers.rs

Lines changed: 1 addition & 0 deletions
@@ -101,6 +101,7 @@ pub mod tests {
         Ok(Self {
             // Target properties.
             extractor: target.extractor.as_str(),
+            tags: &tracker.tags,
             user_agent: target.user_agent.as_deref(),
             ignore_https_errors: target.ignore_https_errors,
             // Config properties.

src/trackers/api_ext.rs

Lines changed: 6 additions & 1 deletion
@@ -756,6 +756,7 @@ where
         let extractor = self.get_script_content(tracker, &target.extractor).await?;
         let scraper_request = WebScraperContentRequest {
             extractor: extractor.as_ref(),
+            tags: &tracker.tags,
             user_agent: target.user_agent.as_deref(),
             ignore_https_errors: target.ignore_https_errors,
             timeout: tracker.config.timeout,
@@ -2865,7 +2866,11 @@ mod tests {
 
         let trackers = api.trackers();
         let tracker_one = trackers
-            .create_tracker(TrackerCreateParams::new("name_one").with_schedule("0 0 * * * *"))
+            .create_tracker(
+                TrackerCreateParams::new("name_one")
+                    .with_schedule("0 0 * * * *")
+                    .with_tags(vec!["tag:1".to_string(), "tag:common".to_string()]),
+            )
             .await?;
         let tracker_two = trackers
             .create_tracker(TrackerCreateParams::new("name_two").with_schedule("0 0 * * * *"))

src/trackers/web_scraper/web_scraper_content_request.rs

Lines changed: 10 additions & 0 deletions
@@ -12,6 +12,9 @@ pub struct WebScraperContentRequest<'a> {
     /// A script (Playwright scenario) used to extract web page content that needs to be tracked.
     pub extractor: &'a str,
 
+    /// Tags associated with the tracker.
+    pub tags: &'a Vec<String>,
+
     /// Optional user agent string to use for every request at the web page.
     pub user_agent: Option<&'a str>,
 
@@ -44,13 +47,18 @@ mod tests {
     fn serialization() -> anyhow::Result<()> {
         assert_json_snapshot!(WebScraperContentRequest {
             extractor: "export async function execute(p) { await p.goto('http://localhost:1234/my/app?q=2'); return await p.content(); }",
+            tags: &vec!["tag1".to_string(), "tag2".to_string()],
             timeout: Some(Duration::from_millis(100)),
             previous_content: Some(&json!("some content")),
             user_agent: Some("Retrack/1.0.0"),
             ignore_https_errors: true
         }, @r###"
         {
           "extractor": "export async function execute(p) { await p.goto('http://localhost:1234/my/app?q=2'); return await p.content(); }",
+          "tags": [
+            "tag1",
+            "tag2"
+          ],
           "userAgent": "Retrack/1.0.0",
           "ignoreHTTPSErrors": true,
           "timeout": 100,
@@ -75,6 +83,7 @@ mod tests {
         )?
         .with_target(TrackerTarget::Page(target.clone()))
         .with_timeout(Duration::from_millis(2500))
+        .with_tags(vec!["tag1".to_string(), "tag2".to_string()])
         .build();
 
         let request = WebScraperContentRequest::try_from(&tracker)?;
@@ -83,6 +92,7 @@ mod tests {
         assert_eq!(request.extractor, target.extractor.as_str());
         assert_eq!(request.user_agent, target.user_agent.as_deref());
         assert_eq!(request.ignore_https_errors, target.ignore_https_errors);
+        assert_eq!(request.tags, &tracker.tags);
 
         // Config properties.
         assert_eq!(request.timeout, Some(Duration::from_millis(2500)));
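For reference, a standalone sketch of how a request struct with a borrowed tags vector can serialize to the camelCase JSON shape seen in the snapshot above. This assumes serde-based serialization with camelCase renaming (suggested by the "userAgent" and "ignoreHTTPSErrors" keys); the struct below is a simplified stand-in, not the crate's actual definition.

// Simplified stand-in for WebScraperContentRequest, assuming serde with
// camelCase field renaming; only a subset of fields is modeled.
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ContentRequestSketch<'a> {
    extractor: &'a str,
    tags: &'a Vec<String>,
    user_agent: Option<&'a str>,
    // `rename_all = "camelCase"` alone would produce "ignoreHttpsErrors", so an
    // explicit rename is assumed to match the "ignoreHTTPSErrors" snapshot key.
    #[serde(rename = "ignoreHTTPSErrors")]
    ignore_https_errors: bool,
}

fn main() -> serde_json::Result<()> {
    let tags = vec!["tag1".to_string(), "tag2".to_string()];
    let request = ContentRequestSketch {
        extractor: "export async function execute(p) { return await p.content(); }",
        tags: &tags,
        user_agent: Some("Retrack/1.0.0"),
        ignore_https_errors: true,
    };
    // Prints: {"extractor":"...","tags":["tag1","tag2"],"userAgent":"Retrack/1.0.0","ignoreHTTPSErrors":true}
    println!("{}", serde_json::to_string(&request)?);
    Ok(())
}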
