Commit a393f32d authored by Knut Behrends's avatar Knut Behrends
Browse files

almost finish work on sample-inserter + fake-sample

parent ed3d6fdd
......@@ -33,13 +33,15 @@ async function get_columns(ax, model_name) {
let model = {}
try {
const response = await ax.get(base_url)
if (response?.data?.models?.length > 0) {
response.data.models
.find((m) => m.table.includes(model_name))
.columns.forEach((_) => (model[_] = undefined))
if (response.data?.models?.length > 0) {
model = response.data.models.find((m) => m.table.includes(model_name))
model.columns.forEach((_) => (model[_] = undefined))
}
} catch (error) {
console.error(error)
console.error(error.response?.data?.name)
console.error(error.response?.data?.message)
console.error(error.response?.statusText)
}
return new Promise((resolve, reject) => {
resolve(model)
......
......@@ -7,11 +7,10 @@ const Util = require("./fake-util")
//faker.seed(112)
const util = new Util()
module.exports = class CoreSectionSplit {
constructor(props, section, prev_section) {
module.exports = class CurationSample {
constructor(props, section) {
this._props = props
this._section = section
this._prev_section = prev_section
for (let k of [
"id",
"archive_files",
......@@ -24,41 +23,40 @@ module.exports = class CoreSectionSplit {
}
}
find_id() {
return faker.fake("{{}}{{}}")
}
find_section_split_id() {
return faker.fake("{{}}{{}}")
}
find_combined_id() {
return faker.fake("{{}}{{}}")
}
find_igsn() {
return faker.fake("{{}}{{}}")
return this._props.section_split_id
}
find_sample_date() {
return faker.fake("{{}}{{}}")
return this._props.section_split_id
}
find_top() {
return faker.fake("{{}}{{}}")
return (
this._section.top_depth +
util.frac_below(0.3) * this._section.section_length
)
}
find_bottom() {
return faker.fake("{{}}{{}}")
find_top() {
return (
this._section.top_depth +
util.frac_below(0.3) * this._section.section_length
)
}
find_interval() {
return faker.fake("{{}}{{}}")
return Math.random()
}
find_analyst() {
return faker.fake("{{}}{{}}")
return this._section.curator
}
find_sample_type() {
return faker.fake("{{}}{{}}")
return util.shuffle(CoreDefaults.sample_type).pop()
//return this._props.sample_type
}
find_section_top_mbsf() {
return faker.fake("{{}}{{}}")
return faker.fake("{{random.number}}") % 200
}
find_request_no() {
return faker.fake("{{}}{{}}")
return this._section.request_no
}
find_comment() {
......@@ -68,30 +66,32 @@ module.exports = class CoreSectionSplit {
.slice(-2)
.join(", ")
return faker.fake(
`core ${this.core_id}: {{company.catchPhraseAdjective}}, ${cond} ({{name.firstName}} {{name.lastName}})`
)
return faker.fake(`core ${this._section.core_id} `)
}
find_amount() {
return faker.fake("{{}}{{}}")
return faker.fake("{{random.float}}")
}
find_amount_unit() {
return faker.fake("{{}}{{}}")
return util.shuffle(CoreDefaults.sampling_units).pop()
}
find_repository() {
return faker.fake("{{}}{{}}")
return "AWI"
}
find_volume() {
return faker.fake("{{}}{{}}")
return this._section.sample_volume
}
find_sample_top_mbsf() {
return faker.fake("{{}}{{}}")
return faker.fake("{{random.number}}") % 300
}
find_sample_bottom_mbsf() {
return faker.fake("{{}}{{}}")
return this.sample_top_mbsf + (faker.fake("{{random.number}}") % 300)
}
find_scientist() {
return faker.fake("{{}}{{}}")
return Array.of(
this._props.scientist_1,
this._props.scientist_2,
this._props.scientist_3
).filter((s) => typeof s != "undefined")
}
fake() {
......
......@@ -46,30 +46,30 @@ const qs_request = {
sort: "id",
"filter[expedition_id]": options.expedition,
}
const qs_cores = {
name: "core",
const qs_splits = {
name: "split",
"per-page": -1,
page: 1,
sort: "id",
"filter[type]": "[^A]",
"filter[expedition_id]": options.expedition,
}
const qs_expedition = {
name: "expedition",
"filter[id]": options.expedition,
}
const qs_samples = {
name: "sample",
"filter[expedition_id]": options.expedition,
}
let scientists = {}
const scientist_get_url =
api_url_frag + "?" + querystring.stringify(qs_scientists)
let requests = {}
const requests_get_url = api_url_frag + "?" + querystring.stringify(qs_request)
const splits_get_url = api_url_frag + "?" + querystring.stringify(qs_splits)
const samples_get_url = api_url_frag + "?" + querystring.stringify(qs_samples)
let cores = {}
const cores_get_url = api_url_frag + "?" + querystring.stringify(qs_cores)
let expeditions = {}
// let qs = [qs_cores, qs_request, qs_scientists, qs_expedition]
// let qs = [qs_splits, qs_request, qs_scientists, qs_expedition]
// // check if cmdline arg was numeric or acronym
// Number.parseInt(options.expedition)
// ? qs.forEach((_) => qs["filter[acr]" = options.expedition))
......@@ -79,9 +79,10 @@ const expedition_get_url =
api_url_frag + "?" + querystring.stringify(qs_expedition)
const queries = {
expeditions: expedition_get_url,
cores: cores_get_url,
splits: splits_get_url,
scientists: scientist_get_url,
requests: requests_get_url,
samples: samples_get_url,
}
let queries_map = new Map()
......@@ -90,12 +91,20 @@ Object.keys(queries).forEach((k) => {
queries_map.set(k, axutil.get_items(ax, queries[k]))
})
let scientists_by_city = new Map()
Promise.all(queries_map.values())
.then((promise) => {
let query_results = []
const key_names = []
let exp_name = ""
Promise.resolve(promise)
.then((results) => (query_results = results))
.then((results) => {
query_results = results
exp_name = query_results[0][0]?.acr
? ` (${query_results[0][0].acr})`
: ""
})
.then((_) => {
let mapIter = queries_map.keys()
const no_results = []
......@@ -105,13 +114,18 @@ Promise.all(queries_map.values())
if (!v.length) {
no_results.push(k)
} else {
console.log(
`Expedition ${
options.expedition
} ${exp_name} has ${v.length.toString().padStart(4)} ${k}.`
)
}
console.log(`Expedition ${options.expedition} has ${v.length} ${k} .`)
})
if (no_results.length) {
console.error(`
########################################################################
Expedition ${options.expedition} has no ${no_results.join(", ")}.
Expedition ${options.expedition}${exp_name} has no ${no_results.join(", ")}.
To continue adding samples,
all ${key_names.join(", ")} are required to be > 0.
Must exit now.
......@@ -120,11 +134,113 @@ Promise.all(queries_map.values())
}
})
.then((_) => {
let scientists = query_results
let [expedition, splits, scientists, requests, samples] = query_results
// for
let existing_samples_reqno = samples
.map((s) => Number.parseInt(s.request_no))
.flat()
let previous_requests = requests.filter((r) =>
existing_samples_reqno.some((s) => r.request_no === s)
)
let req_len_before = requests.length
let new_requests = requests.filter((r) =>
existing_samples_reqno.every((s) => r.request_no !== s)
)
console.log(`
########################################################################
Found ${existing_samples_reqno.length} existing Samples with request-no,
${previous_requests.length} of which matched previous requests.
Therefore we'll insert ${new_requests.length} new sample requests.
(Total Requests: ${req_len_before} for Expedition ${options.expedition}${exp_name}.)
########################################################################`)
let section_split_id_set = new Set(splits.map((s) => s.section_id))
scientists.forEach((s) => (s.city = s.city.toUpperCase()))
scientists.sort(str_sort_by_city).map((sc) => {
let req = new_requests.filter(
(r) =>
r.scientist_1 === sc.full_name ||
r.scientist_2 === sc.full_name ||
r.scientist_3 === sc.full_name
)
let spl = splits.filter(
(s) => s.remarks && s.remarks.includes(sc.last_name)
)
scientists_by_city.set(sc.city, {
city: sc.city,
split: spl,
requests: req,
})
})
// make sure all teams from all cities sample at least one section
for (let [k, c] of scientists_by_city.entries()) {
if (!c.split.length) {
let randsection = util
.shuffle(Array.from(section_split_id_set.values()))
.pop()
let sections_split_ids = Array.of(
randsection - Math.trunc(Math.random() * 10),
randsection,
randsection + Math.trunc(Math.random() * 10)
)
c.split.push(
splits.find((sp) =>
sections_split_ids.some((ssi) => sp.section_id === ssi)
)
)
}
}
for (let c of scientists_by_city.values()) {
console.log({
"city": c.city,
"spli": c.split.map((v) => ({
[v.section_id]: v.combined_id,
})),
"req": Array(c.requests.map((v) => v.request_id)).join(", "),
})
}
})
.then(() => {
let curation_sample_columns = {}
axutil.get_columns(ax, "curation_sample_2").then((obj) => {
curation_sample_columns = obj
console.log(curation_sample_columns)
// for (let c of scientists_by_city.values()) {
// for (let req in c.requests) {
// r
// }
// }
})
})
})
.catch((error) => console.dir(error, { depth: 0 }))
// Comparator for Array.prototype.sort: orders records alphabetically by
// their `city` property, case-insensitively (both sides are uppercased
// before comparison). Returns -1 / 0 / 1 as the sort contract requires.
function str_sort_by_city(a, b) {
  const left = a.city.toUpperCase()
  const right = b.city.toUpperCase()
  if (left === right) {
    return 0
  }
  return left < right ? -1 : 1
}
// Comparator for Array.prototype.sort: orders records alphabetically by
// their `combined_id` property (site/hole identifier), case-insensitively.
// Returns -1 / 0 / 1 as the sort contract requires.
function str_sort_by_site_hole(a, b) {
  const left = a.combined_id.toUpperCase()
  const right = b.combined_id.toUpperCase()
  if (left === right) {
    return 0
  }
  return left < right ? -1 : 1
}
//console.log({ type: typeof _, keys: Object.keys(_).pop() })
// axutil
......@@ -150,7 +266,7 @@ Promise.all(queries_map.values())
// )
// })
// .then(() => {
// // only include cores that have whole rounds WR
// // only include splits that have whole rounds WR
// // and that do not have A and W items (s.exists)
// axutil
// .get_items(ax, scientist_get_url)
......
// Fixture: one sample-request record as returned by the curation API,
// used to exercise the "which requests already have samples?" filtering.
let obj = [
  {
    "request_part": "s",
    "id": 5,
    "expedition_id": 10,
    "combined_id": "5363_s",
    "request_id": "4533",
    "request_type": "ShipShore",
    "scientist_1": "Erick Hansen",
    "scientist_2": null,
    "scientist_3": null,
    "purpose": "Q_TS",
    "request_date": "2020-02-19",
    "approval_date": "2020-04-01",
    "completion_date": "2020-04-01",
    "remarks": "maximize",
    "sample_material": "",
    "sample_volume": 11.41,
    "request_no": 4533,
    "request_complete": "yes",
    "sample_amount": null,
    "sample_unit": "ATP",
    "analyst": "KH",
    "program_id": 1,
  },
]
// request_no values for which samples already exist.
let existing_samples_reqno = [4533]
// Requests whose request_no matches an already-sampled request.
let previous_requests = obj.filter((r) =>
  existing_samples_reqno.some((s) => Number.parseInt(r.request_no, 10) === s)
)
let req_len_before = obj.length
// Requests that do NOT yet have samples.
// BUG FIX: the original compared the result of Array.prototype.find()
// against -1, but find() returns the matching element or undefined — never
// -1 — so the condition was always true and every request was treated as
// new. Use includes() for the membership test instead. Also removed a
// leftover `debugger` statement.
let new_requests = obj.filter(
  (r) => !existing_samples_reqno.includes(Number.parseInt(r.request_no, 10))
)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment