Remove _db in func names of db interactions
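In short, the four per-table database helpers on ParsedPage and ParsedStorage drop their _db suffix; argument lists and return types are unchanged. As a quick reference (a summary of the hunks below, not additional code in the commit):

    // Renames in this commit (signatures otherwise unchanged):
    //   ParsedPage::lookup_db           -> ParsedPage::lookup
    //   ParsedPage::add_or_update_db    -> ParsedPage::add_or_update
    //   ParsedStorage::lookup_db        -> ParsedStorage::lookup
    //   ParsedStorage::add_or_update_db -> ParsedStorage::add_or_update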
src/db.rs (19 lines changed)
@@ -93,7 +93,7 @@ impl DBTable for ParsedPage {
     ";
 }
 impl ParsedPage {
-    pub fn lookup_db(conn: &Connection, timestamp: DateTime<Utc>) -> Option<Self> {
+    pub fn lookup(conn: &Connection, timestamp: DateTime<Utc>) -> Option<Self> {
         let mut stmt = conn
             .prepare(&format!(
                 "SELECT * FROM {} WHERE timestamp = ?",
@@ -110,7 +110,7 @@ impl ParsedPage {
             .ok()
     }

-    pub fn add_or_update_db(&self, conn: &Connection) {
+    pub fn add_or_update(&self, conn: &Connection) {
         let _ = conn
             .execute(
                 &format!(
@@ -148,7 +148,7 @@ impl DBTable for ParsedStorage {
     ";
 }
 impl ParsedStorage {
-    pub fn lookup_db(conn: &Connection, item: i64) -> Vec<ParsedStorage> {
+    pub fn lookup(conn: &Connection, item: i64) -> Vec<ParsedStorage> {
         let mut stmt = conn
             .prepare(&format!("SELECT * FROM {} WHERE id = ?", Self::TABLE_NAME))
             .ok()
@@ -173,7 +173,7 @@ impl ParsedStorage {
             .collect()
     }

-    pub fn add_or_update_db(&self, conn: &Connection) {
+    pub fn add_or_update(&self, conn: &Connection) {
         let _ = conn.execute(&format!("
             INSERT OR REPLACE INTO {}
             (item, total_gigabytes, quantity, sizes_gigabytes, parse_engine, need_description_check)
@@ -440,18 +440,15 @@ mod tests {
             parse_engine: 9,
             needed_description_check: true,
         };
-        parsed.add_or_update_db(&db);
-        assert_eq!(ParsedStorage::lookup_db(&db, listing.id), vec![parsed]);
+        parsed.add_or_update(&db);
+        assert_eq!(ParsedStorage::lookup(&db, listing.id), vec![parsed]);

         let page = ParsedPage {
             category: "ssd".to_owned(),
             timestamp: std::time::SystemTime::now().into(),
         };
-        page.add_or_update_db(&db);
-        assert_eq!(
-            ParsedPage::lookup_db(&db, page.timestamp),
-            Some(page.clone())
-        );
+        page.add_or_update(&db);
+        assert_eq!(ParsedPage::lookup(&db, page.timestamp), Some(page.clone()));

         let apperance = ItemAppearances {
             item: listing.item_id,
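With both tables now exposing identically named lookup and add_or_update methods, call sites follow one pattern regardless of table. A minimal sketch of that pattern, assuming the Mutex-wrapped rusqlite::Connection that main.rs passes around; the function name refresh_item and the timestamp/item_id parameters are placeholders for illustration only:

    use chrono::{DateTime, Utc};
    use std::sync::Mutex;

    // Illustrative helper, not part of the commit; it only exercises the renamed methods.
    fn refresh_item(db: &Mutex<rusqlite::Connection>, timestamp: DateTime<Utc>, item_id: i64) {
        let conn = db.lock().unwrap();
        // ParsedPage::lookup returns Option<ParsedPage>.
        if let Some(page) = ParsedPage::lookup(&conn, timestamp) {
            page.add_or_update(&conn);
        }
        // ParsedStorage::lookup returns Vec<ParsedStorage>; its add_or_update is an
        // INSERT OR REPLACE (see the hunk above), so re-running it is harmless.
        for storage in ParsedStorage::lookup(&conn, item_id) {
            storage.add_or_update(&conn);
        }
    }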
src/main.rs (70 lines changed)
@@ -24,7 +24,7 @@ async fn page_get(
     db: Data<Mutex<rusqlite::Connection>>,
     timestamp: web::Path<i64>,
 ) -> Result<impl Responder> {
-    Ok(web::Json(ParsedPage::lookup_db(
+    Ok(web::Json(ParsedPage::lookup(
         &db.lock().unwrap(),
         chrono::DateTime::from_timestamp(*timestamp, 0).unwrap(),
     )))
@@ -71,7 +71,7 @@ async fn parse_listings(db: Data<Mutex<rusqlite::Connection>>) -> Result<impl Re
         .iter()
         .map(|l| parser_storage::parse_size_and_quantity(l.0, &l.1))
         .inspect(|_| cnt = cnt + 1)
-        .for_each(|ps| ps.add_or_update_db(&db_unlocked));
+        .for_each(|ps| ps.add_or_update(&db_unlocked));

     Ok(web::Json(cnt))
 }
@@ -81,10 +81,7 @@ async fn listing_parse_get(
     db: Data<Mutex<rusqlite::Connection>>,
     id: web::Path<i64>,
 ) -> Result<impl Responder> {
-    Ok(web::Json(ParsedStorage::lookup_db(
-        &db.lock().unwrap(),
-        *id,
-    )))
+    Ok(web::Json(ParsedStorage::lookup(&db.lock().unwrap(), *id)))
 }

 pub fn timestamps_from_dir(path: &Path) -> Vec<i64> {
@@ -121,22 +118,20 @@ async fn parse_post(
     // Ensure the category is created.
     let url: serde_json::Value =
         serde_json::from_str(&std::fs::read_to_string(dir.join("url.json")).unwrap()).unwrap();
-    info!("{:?}", url);
     let su = SearchURL {
         full_url: url.to_string(),
         name: category.to_string(),
     };
-    info!("{:?}", su);
     su.add_or_update(&db.lock().unwrap());

-    let added: u64 = timestamps_from_dir(dir)
-        .iter()
-        .filter(|t| {
-            info!("Checking for the existance of page {t}");
-            let p = ParsedPage::lookup_db(
-                &db.lock().unwrap(),
-                chrono::DateTime::from_timestamp(**t, 0).unwrap(),
-            );
+    // Find all pages.
+    let pages = timestamps_from_dir(dir);
+
+    // See what pages haven't been seen before.
+    let to_parse = pages.iter().filter(|t| {
+        let ts = chrono::DateTime::from_timestamp(**t, 0).unwrap();
+        info!("Checking if page with a timestamp of {ts} and catagory of {category} exists");
+        let p = ParsedPage::lookup(&db.lock().unwrap(), ts);

         // Timestamp never seen before, lets pass it on.
         if p.is_none() {
@@ -149,35 +144,34 @@ async fn parse_post(
             return false;
         }
         return true;
-        })
-        .map(|t| {
-            let timestamp = chrono::DateTime::from_timestamp(*t, 0).unwrap();
-            info!("Adding or updating db with timestamp:{timestamp} catagory:{category}");
+    });
+
+    let mut added_count = 0;
+    for p in to_parse {
+        let ts = chrono::DateTime::from_timestamp(*p, 0).unwrap();
+        info!("Adding page with a timestamp of {ts} and catagory of {category} to db");
         ParsedPage {
-            timestamp: timestamp,
+            timestamp: ts,
             category: category.to_string(),
         }
-        .add_or_update_db(&db.lock().unwrap());
+        .add_or_update(&db.lock().unwrap());

-        let mut cnt = 0;
-        parser_ebay::extract_data_from_html(
-            &std::fs::read_to_string(dir.join(format!("{t}.html"))).unwrap(),
-            &timestamp,
+        let elements = parser_ebay::extract_data_from_html(
+            &std::fs::read_to_string(dir.join(format!("{ts}.html"))).unwrap(),
+            &ts,
             &category,
         )
-        .unwrap()
-        .iter()
-        .for_each(|lp| {
-            cnt = cnt + 1;
-            lp.0.add_or_update(&db.lock().unwrap());
-            lp.1.add_or_update(&db.lock().unwrap());
-            info!("Inserting id:{}, title:{}", lp.0.item_id, lp.0.title);
-        });
-        cnt
-        })
-        .sum();
+        .unwrap();

-    Ok(added.to_string())
+        added_count += elements.len();
+        for e in elements {
+            e.0.add_or_update(&db.lock().unwrap());
+            e.1.add_or_update(&db.lock().unwrap());
+            info!("Inserting id:{}, title:{}", e.0.item_id, e.0.title);
+        }
+    }
+
+    Ok(added_count.to_string())
 }

 #[actix_web::main]