Commit bd6e5f21 authored by Knut Behrends

begin work on fake-lithology + related files

parent a0cab22a
@@ -5,3 +5,4 @@ section_example.json
notes.txt
test.js
scientists.json
data
#!/usr/bin/env node
"use strict"
//var moment = require("moment")
const faker = require("faker")
const CoreDefaults = require("./fake-core-defaults.js")
const Util = require("./fake-util")
//faker.seed(112)
const util = new Util()
module.exports = class LithologyGeology {
constructor(props, section_split) {
this._props = props
this._section = section_split
for (let k of [
"id",
"archive_files",
"combined_id",
"igsn",
"ukbgs_section_id",
"igsn_ukbgs",
]) {
delete this._props[k]
}
}
find_section_split_id() {
return this._props.section_split_id
}
find_bottom_depth() {
// bottom depth = top depth + sampled interval
const b = util.round((
this.top_depth + this.interval
), 1)
this._props.bottom_depth = b
return b
}
find_top_depth() {
// top depth = section top plus a small random fraction of the split's offset
const t = util.round((
this._section.top_depth +
util.frac_below(0.3) * (this._props.offset_m)
), 1)
this._props.top_depth = t
return t
}
find_interval() {
const sl = this._props.interval_cm
const minlen = 0.03 * (sl + 1)
let len = 0
if(sl < 10){
// allow larger pieces for smaller sections
len = sl * util.frac_above(0.3)
} else {
len = sl * util.frac_below(0.2)
}
const int = util.round((minlen + len), 1)
this._props.interval = int
return int
}
find_analyst() {
return util.shuffle(CoreDefaults.curator).pop()
}
find_rock_class() {
return util.shuffle(CoreDefaults.rock_class).pop()
//return this._props.sample_type
}
find_rock_type() {
return util.shuffle(CoreDefaults.rock_type).pop()
//return this._props.sample_type
}
find_description() {
// two random sample-state descriptors plus core id and remarks
let cond = util
.shuffle(CoreDefaults.sample_state)
.map((c) => c.toLowerCase())
.slice(-2)
.join(", ")
return `core id: ${this._section.core_id}, ${cond} ${this._props.remarks}`
}
find_color() {
// a human-readable color name, e.g. "maroon"
return faker.fake("{{commerce.color}}")
}
fake() {
// fake every column that has a matching getter / find_* pair;
// columns without one keep whatever the template already contains
Object.keys(this._props).forEach((property) => {
if (typeof this[`find_${property}`] !== "function") return
const value = this[property]
if (value !== undefined) this._props[property] = value
})
return this._props
}
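// Lazy getters: each returns the cached _props value when it is truthy and
// otherwise derives one via the matching find_* method. Several of the
// find_* fallbacks referenced below (find_litho_unit, find_composition,
// find_mcd_*, find_section_length) are not implemented yet in this
// work-in-progress class.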
get section_split_id() {
return this._props.section_split_id ? this._props.section_split_id : this.find_section_split_id()
}
get litho_unit() {
return this._props.litho_unit ? this._props.litho_unit : this.find_litho_unit()
}
get top_depth() {
return this._props.top_depth ? this._props.top_depth : this.find_top_depth()
}
get interval() {
return this._props.interval ? this._props.interval : this.find_interval()
}
get bottom_depth() {
return this._props.bottom_depth ? this._props.bottom_depth : this.find_bottom_depth()
}
get rock_class() {
return this._props.rock_class ? this._props.rock_class : this.find_rock_class()
}
get rock_type() {
return this._props.rock_type ? this._props.rock_type : this.find_rock_type()
}
get description() {
return this._props.description ? this._props.description : this.find_description()
}
get analyst() {
return this._props.analyst ? this._props.analyst : this.find_analyst()
}
get color() {
return this._props.color ? this._props.color : this.find_color()
}
get composition() {
return this._props.composition ? this._props.composition : this.find_composition()
}
get combined_id() {
return this._props.combined_id ? this._props.combined_id : this.find_combined_id()
}
get mcd_top() {
return this._props.mcd_top ? this._props.mcd_top : this.find_mcd_top()
}
get mcd_top_depth_unit() {
return this._props.mcd_top_depth_unit ? this._props.mcd_top_depth_unit : this.find_mcd_top_depth_unit()
}
get mcd_bottom() {
return this._props.mcd_bottom ? this._props.mcd_bottom : this.find_mcd_bottom()
}
get mcd_bottom_unit() {
return this._props.mcd_bottom_unit ? this._props.mcd_bottom_unit : this.find_mcd_bottom_unit()
}
get section_length() {
return this._props.section_length ? this._props.section_length : this.find_section_length()
}
}
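// Rough usage sketch (hedged: `lithology_columns` stands for a column template
// fetched from the mDIS API, and `split` for one section-split record with
// top_depth / core_id / section_id fields; the numeric values are arbitrary):
//
//   const LithologyGeology = require("./fake-lithology.js")
//   const fake_lithounit = new LithologyGeology(
//     Object.assign({}, lithology_columns, {
//       section_split_id: split.id,
//       interval_cm: 150,
//       offset_m: 0.1,
//       remarks: `SecId:${split.section_id}`,
//     }),
//     split
//   ).fake()
//   // fake_lithounit now carries generated top_depth, bottom_depth, interval,
//   // rock_class, rock_type, description, analyst and color values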
#!/usr/bin/env node
"use strict"
//var moment = require("moment")
const faker = require("faker")
const CoreDefaults = require("./fake-core-defaults.js")
const Util = require("./fake-util")
//faker.seed(112)
const util = new Util()
module.exports = class CurationSample {
constructor(props, section_split) {
this._props = props
this._section = section_split
for (let k of [
"id",
"archive_files",
"combined_id",
"igsn",
"ukbgs_section_id",
"igsn_ukbgs",
]) {
delete this._props[k]
}
}
find_section_split_id() {
return this._props.section_split_id
}
find_sample_date() {
return this._props.sample_date
}
find_bottom() {
// bottom = top + interval; derive both first if they are not cached yet
const b = util.round((
(this._props.top ?? this.find_top()) + (this._props.interval ?? this.find_interval())
), 1)
this._props.bottom = b
return b
}
find_top() {
const t = util.round((
this._section.top_depth +
util.frac_below(0.3) * (this._props.section_length_cm ) //cm
), 1)
this._props.top = t
return t
}
find_interval() {
const sl = this._props.section_length_cm
const minlen = 0.03 * (sl + 1)
let len = 0
if(sl < 10){
// allow larger pieces for smaller sections
len = sl * util.frac_above(0.3)
} else {
len = sl * util.frac_below(0.2)
}
const int = util.round((minlen + len), 1)
this._props.interval = int
return int
}
find_analyst() {
return util.shuffle(CoreDefaults.curator).pop()
}
find_sample_type() {
return util.shuffle(CoreDefaults.sample_purpose).pop()
//return this._props.sample_type
}
find_section_top_mbsf() {
const t = util.round(this._section.top_depth/100,0) + 1000 //faker.fake("{{random.number}}") % 200
this._props.top_mbsf = t
return t
}
find_description() {
// two random sample-state descriptors plus core id and remarks
let cond = util
.shuffle(CoreDefaults.sample_state)
.map((c) => c.toLowerCase())
.slice(-2)
.join(", ")
return `core id: ${this._section.core_id}, ${cond} ${this._props.remarks}`
}
find_color() {
// a human-readable color name, e.g. "maroon"
return faker.fake("{{commerce.color}}")
}
find_volume() {
// placeholder until a real sample volume is modelled
return this._props.litho_unit || "U0000"
}
fake() {
// fake every column for which a find_* method exists; values already cached
// by an earlier find_* call (e.g. `top` set by find_bottom) are kept
Object.keys(this._props).forEach((property) => {
const finder = this[`find_${property}`]
if (typeof finder === "function") {
this._props[property] = this._props[property] ?? finder.call(this)
}
})
return this._props
}
}
#!/usr/bin/env node
"use strict"
// knb July 2020
// use Node v14+
// insert lithological-unit records into an mDIS database
//
// call this script with:
// ../api-caller-request-inserter.sh
// or
// node src/request-inserter.js
const config = require("./config.js")
const axios = require("axios")
const axutil = require("./axios-util.js")
const querystring = require("querystring")
const getopts = require("getopts")
const Util = require("./classes/fake-util.js")
const LithologyGeology = require("./classes/fake-lithology.js")
const util = new Util()
const options = getopts(process.argv.slice(2), {
alias: {
expedition: "e", // PK column 'id' of core_core table
//infile: "i", // alternative
},
default: {
expedition: 10, //CAIR
},
})
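// Example call (hedged; script and option names as in the header comment above):
//   node src/request-inserter.js -e 10          # --expedition 10 = CAIR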
//let core_url = "/api/v1/form?name=core&per-page=1&sort=-id"
//const core_info = options.infile ? JSON.parse(options.infile) : {}
const ax = axios.create(config.endpoint)
const api_url_frag = `/api/v1/form`
// all section-splits of the selected expedition, excluding the archive ("A") splits
const qs_splits = {
name: "split",
"per-page": -1,
page: 1,
sort: "id",
"filter[type]": "[^A]", // regex filter: skip the untouchable "A" splits
"filter[expedition_id]": options.expedition,
}
const qs_expedition = {
name: "expedition",
"filter[id]": options.expedition,
}
const qs_lithology = {
name: "lithology",
"filter[expedition_id]": options.expedition,
}
const splits_get_url = api_url_frag + "?" + querystring.stringify(qs_splits)
const lithology_get_url = api_url_frag + "?" + querystring.stringify(qs_lithology)
delete qs_lithology["filter[expedition_id]"]
const lithology_post_url = api_url_frag + "?" + querystring.stringify(qs_lithology)
const expedition_get_url =
api_url_frag + "?" + querystring.stringify(qs_expedition)
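// For the default expedition (10) the GET URLs decode roughly to the following
// (shown unescaped for readability; querystring.stringify() percent-encodes the brackets):
//   /api/v1/form?name=split&per-page=-1&page=1&sort=id&filter[type]=[^A]&filter[expedition_id]=10
//   /api/v1/form?name=lithology&filter[expedition_id]=10
//   /api/v1/form?name=expedition&filter[id]=10
// lithology_post_url drops the expedition filter again: /api/v1/form?name=lithology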
const queries = {
// expeditions: expedition_get_url,
// cores: cores_get_url,
// sections: sections_get_url,
splits: splits_get_url,
lithounits: lithology_get_url,
}
let queries_map = new Map()
Object.keys(queries).forEach((k) => {
console.log(querystring.unescape(queries[k]))
queries_map.set(k, axutil.get_items(ax, queries[k]))
})
let lithounits_by_core = new Map()
Promise.all(queries_map.values())
.then((all_results) => {
let query_results = []
const key_names = []
let exp_name = ""
Promise.resolve(all_results)
.then((results) => {
query_results = results
exp_name = query_results[0][0]?.acr
? ` (${query_results[0][0].acr})`
: ""
})
.then((_) => {
let mapIter = queries_map.keys()
const no_results = []
query_results.forEach((v) => {
let k = mapIter.next().value
key_names.push(k)
if (k !== "lithounits" && !v.length) {
no_results.push(k)
} else {
if (k == "lithounits" && !v.length) v = ""
console.log(
`Expedition ${
options.expedition
} ${exp_name} has ${v.length.toString().padStart(4)} ${k}.`
)
}
})
if (no_results.length) {
console.error(`
########################################################################
Expedition ${options.expedition}${exp_name} has no ${no_results.join(", ")}.
To continue adding lithology or lithological units,
the counts of all ${key_names.join(", ")} must be > 0.
Must exit now.
########################################################################`)
process.exit(0)
}
})
.then((_) => {
let [splits, lithology] = query_results
// summarize what is already described vs. what is available for new lithounits
let distinct_splits_with_lithounits = new Set(),
distinct_existing_lithounits = new Set()
let distinct_splits = new Set(splits.map(s => s.id))
let distinct_sections = new Set(splits.map(s => s.section_id))
let distinct_cores = new Set(splits.map(s => s.core_id))
if (Array.isArray(lithology)) {
const existing_lithounits = lithology
.map((s) => ({
combined_id: `${s.combined_id}`,
split_id: s.section_split_id,
litho_unit: `${s.litho_unit}`,
}))
// let new_splits = cores.filter((c) =>
// existing_lithounits.every((s) => c.request_no !== s)
// )
distinct_splits_with_lithounits = new Set(existing_lithounits.map((l) => l.split_id))
distinct_existing_lithounits = new Set(existing_lithounits.map((l) => l.litho_unit))
}
console.log(`
########################################################################
Already described:
Found ${distinct_existing_lithounits.size || 0} existing Lithounits,
from ${distinct_splits_with_lithounits.size} section-splits.
New:
Found ${distinct_splits.size} existing Splits,
from ${distinct_sections.size} sections and ${distinct_cores.size} cores
(Existing splits ${splits.length} for Expedition ${options.expedition}${exp_name}.)
########################################################################`)
let splits_by_site = groupBy(splits, "site_id")
let splits_by_hole = groupBy(splits, "hole_id")
let splits_by_cores = groupBy(splits, "core_id")
// for each site, pick one set of probabilities for igneous, metamorphic, sedimentary
let [igneous, metamorphic, sedimentary] = assign3probs()
// for each hole, pick some {class: ..., group...} pairs from the most common
// make sure all cores from that site and hole get assigned rocks from that pair
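// Hedged sketch of the per-site assignment described above (not wired up yet;
// pick_rock_class is a hypothetical helper doing a plain weighted draw):
//
//   function pick_rock_class(p_igneous, p_metamorphic, p_sedimentary) {
//     const r = Math.random()
//     if (r < p_igneous) return "igneous"
//     if (r < p_igneous + p_metamorphic) return "metamorphic"
//     return "sedimentary"
//   }
//   // one dominant rock class per site, reused for all holes/cores of that site:
//   let rock_class_by_site = new Map()
//   for (let site_id of Object.keys(splits_by_site)) {
//     rock_class_by_site.set(site_id, pick_rock_class(igneous, metamorphic, sedimentary))
//   }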
// for (let [core_id, split] of sections_by_cores.entries()) {
// let lithology_template = {} // get a new fake lithology
// // vary it through all
// if (!c.split_section.length) {
// let randsection = util
// .shuffle(Array.from(section_split_id_set.values()))
// .pop()
// // This step might pick "untouchable" "A" section-splits for sampling,
// // but that's okay. Correct manually later
// let sections_split_ids = Array.of(
// randsection - Math.trunc(Math.random() * 10),
// randsection,
// randsection + Math.trunc(Math.random() * 10)
// )
// c.split_section.push(
// splits.find((sp) =>
// sections_split_ids.some((ssi) => sp.id === ssi)
// )
// )
// }
// }
// for (let c of lithounits_by_core.values()) {
// console.log({
// "city": c.city,
// "spli": c.split_section.map((v) => ({
// [v.id]: v.combined_id,
// })),
// "req": Array(c.requests.map((v) => v.request_id)).join(", "),
// })
// }
// })
// .then(() => {
// let curation_sample_columns = {}
// axutil.get_columns(ax, "curation_sample_2")
// .then((curation_sample) => {
// let fake_sample_data = []
// let i = 0
// for (let s of lithounits_by_core.values()) {
// // create an array of 0-10 requests per scientist,
// // assign a few echo sections_split(s) "reserved" for this team
// // (usually there are only 1-2 splits for this team)
// for (let sp of s.split_section){
// let n_picked = Number.parseInt(util.frac_below(0.666) * s.requests.length)
// let request_ids_per_city = util.shuffle(s.requests).slice(-n_picked)
// // ~ 5 requests for this split
// for (let r of request_ids_per_city) {
// let sec_len = sp.section_length ? sp.section_length : util.round(util.frac_above(0.5), 2)
// let fake_sample = new LithologyGeology(
// Object.assign({}, curation_sample, {
// request_id: r.request_id,
// request_no: `${r.request_no}`,
// scientist_1: r.scientist_1,
// scientist_2: r.scientist_2,
// scientist_3: r.scientist_3,
// section_split_id: sp.id,
// section_length_cm: util.round(((util.frac_below(0.1) * sec_len * 100) + 1),0),
// remarks: `SecId:${sp.section_id}, secLen ${util.round(sp.section_length,1)}m`,
// sample_date : Array.of(r.completion_date, r.approval_date,
// r.request_date, util.date_between()).find(d => d != null)
// }),
// sp
// ).fake()
// fake_sample_data.push(
// Object.assign({}, fake_sample)
// )
// i++
// }
// }
// }
// console.log(`Created ${i} fake lithology, will try to upload`)
// // console.log(fake_sample_data[0])
// return new Promise((resolve, reject) => {
// resolve(fake_sample_data), reject(fake_sample_data)
// })
// })
// .then((fake_sample_data) => {
// return fake_sample_data.flat().map((fake_sample, i) => {
// axutil.upload(
// ax,
// lithology_post_url,
// fake_sample,
// i
// )
// })
})
// .then((fake_request_promises) =>
// Promise.all(fake_request_promises).then((s) =>
// console.log(`
// #############################################################
// Expedition ${options.expedition}: Tried to upload ${fake_request_promises.length} lithounits
// ############################################################# `)
// )
// )
})
// })
.catch((error) => console.dir(error, { depth: 0 }))
function str_sort_by_core_type(a, b) {
let A = a.core_type.toUpperCase()
let B = b.core_type.toUpperCase()
if (A < B) {
return -1
}
if (A > B) {
return 1
}
return 0
}
// from MDN
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/reduce
function groupBy(objectArray, property) {
return objectArray.reduce(function (acc, obj) {
let key = obj[property]
if (!acc[key]) {
acc[key] = []
}
acc[key].push(obj)
return acc
}, {})
}
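// e.g. groupBy(splits, "core_id") -> { "101": [splitA, splitB, ...], "102": [...] }
// (keys are the stringified core_id values; the ids shown are only illustrative)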
function assign3probs(){
// two random fractions plus the remainder, so the three sum to 1 (before rounding);
// shuffled so that no rock class is systematically favoured
let probs = [util.frac_below(0.2) + 0.05, util.frac_above(0.3) - 0.05]
probs.push(1 - probs[0] - probs[1])
return util.shuffle(probs).map(p => util.round(p, 3))
}
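// e.g. assign3probs() -> [ 0.62, 0.14, 0.24 ]  (three fractions that sum to ~1,
// shuffled and rounded to 3 decimals; concrete values depend on util.frac_below / frac_above)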