From 2a60baa5d6f5eb8bee798267c7227b315c61dc07 Mon Sep 17 00:00:00 2001 From: VadimKeller Date: Thu, 30 Sep 2021 08:47:47 +0200 Subject: [PATCH 1/3] dont execute initializers on db create --- config/initializers/delayed_job_config.rb | 4 ++++ config/initializers/eln_features.rb | 6 ++---- config/shrine_config.yml | 14 ++++++++++++++ 3 files changed, 20 insertions(+), 4 deletions(-) create mode 100644 config/shrine_config.yml diff --git a/config/initializers/delayed_job_config.rb b/config/initializers/delayed_job_config.rb index ddff927f15..914317bbc2 100644 --- a/config/initializers/delayed_job_config.rb +++ b/config/initializers/delayed_job_config.rb @@ -1,3 +1,7 @@ +# frozen_string_literal: true + +return if ARGV.include?('db:create') || ARGV.include?('db:setup') + # Delayed::Worker.destroy_failed_jobs = false # Delayed::Worker.sleep_delay = 60 # Delayed::Worker.max_attempts = 3 diff --git a/config/initializers/eln_features.rb b/config/initializers/eln_features.rb index b4836159d1..74db38d902 100644 --- a/config/initializers/eln_features.rb +++ b/config/initializers/eln_features.rb @@ -1,10 +1,8 @@ # frozen_string_literal: true -# pack version -pack_path = Webpacker.manifest.send(:data)&.fetch('application.js', nil) -ENV['VERSION_ASSETS'] = pack_path && File.basename(pack_path) +return if ARGV.include?('db:create') || ARGV.include?('db:setup') -ActiveSupport.on_load(:active_record) do +Rails.application.configure do # config.eln_features = ActiveSupport::OrderedOptions.new # config.eln_features.merge(ActiveRecord::Base.connection.table_exists?('matrices') ? Matrice.pluck(:name, :id).to_h : {}) diff --git a/config/shrine_config.yml b/config/shrine_config.yml new file mode 100644 index 0000000000..6c66715a60 --- /dev/null +++ b/config/shrine_config.yml @@ -0,0 +1,14 @@ +development: + :maximum_size: 100 + :cache: 'uploads/development/cache' + :store: 'uploads/development' + +test: + :maximum_size: 100 + :cache: 'uploads/test/cache' + :store: 'uploads/test' + +production: + :maximum_size: 100 + :cache: 'uploads/production/cache' + :store: 'uploads/production' \ No newline at end of file From 63d5bd10c0f4d05b2f342a6cedaac7665bddb227 Mon Sep 17 00:00:00 2001 From: Martin Schneider Date: Fri, 15 Oct 2021 12:29:44 +0200 Subject: [PATCH 2/3] Refactorings for ReactionDetails and SampleDetails This commit refactors `ReactionDetails` and `SampleDetails` to make them pass eslint checks. We had unused functions there, which will be removed by this commit: - SampleDetails#initiateAnalysisButton - SampleDetails#initiateAnalysisWithKind - SampleDetails#transferToDeviceButton Also disables `forbid-prop-types` in .eslintrc for now. This can be taken out later, when we can better specify the propTypes across the entire project. 
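
As a rough sketch of what "better specify the propTypes" could later look like (so that
`react/forbid-prop-types` can be re-enabled), a component such as ReactionDetails could
replace the blanket `PropTypes.object` with a shape. The field list below is illustrative
only, not the complete Reaction model:

    // sketch only: illustrative fields, not the full Reaction model
    ReactionDetails.propTypes = {
      reaction: PropTypes.shape({
        id: PropTypes.oneOfType([PropTypes.number, PropTypes.string]),
        name: PropTypes.string,
        changed: PropTypes.bool,
      }).isRequired,
      toggleFullScreen: PropTypes.func.isRequired,
    };
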
--- .eslintrc | 3 +- app/packs/src/components/ReactionDetails.js | 250 ++++++++------- app/packs/src/components/SampleDetails.js | 319 ++++++++------------ 3 files changed, 268 insertions(+), 304 deletions(-) diff --git a/.eslintrc b/.eslintrc index bda07eb179..6499fce11c 100644 --- a/.eslintrc +++ b/.eslintrc @@ -12,6 +12,7 @@ "no-console": ["off"], "comma-dangle": [1,"only-multiline"], "react/jsx-filename-extension": [1, { "extensions": [".js", ".jsx"] }], - "prefer-destructuring": ["error", {"object": true, "array": false}] + "prefer-destructuring": ["error", {"object": true, "array": false}], + "react/forbid-prop-types": ["off"] } } diff --git a/app/packs/src/components/ReactionDetails.js b/app/packs/src/components/ReactionDetails.js index e5b5d3190e..e9ff798681 100644 --- a/app/packs/src/components/ReactionDetails.js +++ b/app/packs/src/components/ReactionDetails.js @@ -1,12 +1,14 @@ import React, { Component } from 'react'; +import Aviator from 'aviator'; import PropTypes from 'prop-types'; import { - Col, Panel, ListGroupItem, ButtonToolbar, Button, + Panel, ListGroupItem, ButtonToolbar, Button, Tabs, Tab, OverlayTrigger, Tooltip } from 'react-bootstrap'; import SvgFileZoomPan from 'react-svg-file-zoom-pan-latest'; import { findIndex } from 'lodash'; import uuid from 'uuid'; +import Immutable from 'immutable'; import ElementCollectionLabels from './ElementCollectionLabels'; import ElementAnalysesLabels from './ElementAnalysesLabels'; import ElementActions from './actions/ElementActions'; @@ -32,17 +34,39 @@ import ExportSamplesBtn from './ExportSamplesBtn'; import CopyElementModal from './common/CopyElementModal'; import { permitOn } from './common/uis'; import { addSegmentTabs } from './generic/SegmentDetails'; -import Immutable from 'immutable'; import ElementDetailSortTab from './ElementDetailSortTab'; import ScifinderSearch from './scifinder/ScifinderSearch'; +const handleProductClick = (product) => { + const uri = Aviator.getCurrentURI(); + const uriArray = uri.split(/\//); + Aviator.navigate(`/${uriArray[1]}/${uriArray[2]}/sample/${product.id}`, { silent: true }); + sampleShowOrNew({ params: { sampleID: product.id } }); +}; + +const productLink = product => ( + + Analysis: +   + + +); + export default class ReactionDetails extends Component { constructor(props) { super(props); const { reaction } = props; this.state = { - reaction: reaction, + reaction, literatures: reaction.literatures, activeTab: UIStore.getState().reaction.activeTab, visible: Immutable.List(), @@ -59,14 +83,14 @@ export default class ReactionDetails extends Component { this.handleSubmit = this.handleSubmit.bind(this); this.onTabPositionChanged = this.onTabPositionChanged.bind(this); this.handleSegmentsChange = this.handleSegmentsChange.bind(this); - if(!reaction.reaction_svg_file) { + if (!reaction.reaction_svg_file) { this.updateReactionSvg(); } } componentDidMount() { - UIStore.listen(this.onUIStoreChange) + UIStore.listen(this.onUIStoreChange); } // eslint-disable-next-line camelcase @@ -97,17 +121,51 @@ export default class ReactionDetails extends Component { } componentWillUnmount() { - UIStore.unlisten(this.onUIStoreChange) + UIStore.unlisten(this.onUIStoreChange); } onUIStoreChange(state) { - if (state.reaction.activeTab != this.state.activeTab) { + if (state.reaction.activeTab !== this.state.activeTab) { this.setState({ activeTab: state.reaction.activeTab }); } } + onTabPositionChanged(visible) { + this.setState({ visible }); + } + + updateReactionSvg() { + const { reaction } = this.state; + const 
materialsSvgPaths = { + starting_materials: reaction.starting_materials.map(material => material.svgPath), + reactants: reaction.reactants.map(material => material.svgPath), + products: reaction.products.map(material => [material.svgPath, material.equivalent]) + }; + + const solvents = reaction.solvents.map((s) => { + const name = s.preferred_label; + return name; + }).filter(s => s); + + let temperature = reaction.temperature_display; + if (/^[-|\d]\d*\.{0,1}\d{0,2}$/.test(temperature)) { + temperature = `${temperature} ${reaction.temperature.valueUnit}`; + } + + ReactionSvgFetcher.fetchByMaterialsSvgPaths( + materialsSvgPaths, + temperature, + solvents, + reaction.duration, + reaction.conditions + ).then((result) => { + reaction.reaction_svg_file = result.reaction_svg; + this.setState(reaction); + }); + } + handleSubmit(closeView = false) { LoadingActions.start(); @@ -124,16 +182,17 @@ export default class ReactionDetails extends Component { } reactionIsValid() { - const {reaction} = this.state; + const { reaction } = this.state; return reaction.hasMaterials() && reaction.SMGroupValid(); } - handleReactionChange(reaction, options={}) { - reaction.changed = true; - if(options.schemaChanged) { - this.setState({ reaction }, () => this.updateReactionSvg()); + handleReactionChange(reaction, options = {}) { + const updatedReaction = reaction; + updatedReaction.changed = true; + if (options.schemaChanged) { + this.setState({ updatedReaction }, () => this.updateReactionSvg()); } else { - this.setState({ reaction }); + this.setState({ updatedReaction }); } } @@ -145,7 +204,7 @@ export default class ReactionDetails extends Component { } else if (type === 'rfValue') { value = rfValueFormat(event.target.value) || ''; } else { - value = event.target.value; + ({ value } = event.target); } const { reaction } = this.state; @@ -154,42 +213,20 @@ export default class ReactionDetails extends Component { this.handleReactionChange(newReaction, options); } - handleProductClick(product) { - const uri = Aviator.getCurrentURI(); - const uriArray = uri.split(/\//); - Aviator.navigate(`/${uriArray[1]}/${uriArray[2]}/sample/${product.id}`, { silent: true }); - sampleShowOrNew({ params: { sampleID: product.id} }); - } - handleProductChange(product, cb) { - let {reaction} = this.state - - reaction.updateMaterial(product) - reaction.changed = true + const { reaction } = this.state; - this.setState({ reaction }, cb) - } + reaction.updateMaterial(product); + reaction.changed = true; - productLink(product) { - return ( - - Analysis: -   - this.handleProductClick(product)} - style={{cursor: 'pointer'}} - title="Open sample window"> -  {product.title()} - - - ) + this.setState({ reaction }, cb); } productData(reaction) { const { products } = this.state.reaction; const tabs = products.map((product, key) => { - const title = this.productLink(product); + const title = productLink(product); const setState = () => this.handleProductChange(product); const handleSampleChanged = (_, cb) => this.handleProductChange(product, cb); @@ -234,10 +271,10 @@ export default class ReactionDetails extends Component { extraTab(ind) { const reaction = this.state.reaction || {}; const num = ind; - const NoName = XTabs["content"+num]; - const TabName = XTabs["title"+num]; + const NoName = XTabs[`content${num}`]; + const TabName = XTabs[`title${num}`]; return ( - + @@ -245,23 +282,24 @@ export default class ReactionDetails extends Component { ); } - reactionSVG(reaction) { - if(!reaction.svgPath) { + reactionSVG() { + const { reaction } = 
this.state; + + if (!reaction.svgPath) { return false; - } else { - const svgProps = reaction.svgPath.substr(reaction.svgPath.length - 4) === '.svg' ? { svgPath: reaction.svgPath } : { svg: reaction.reaction_svg_file } - if(reaction.hasMaterials()) { - return ( - )} } + + const svgProps = reaction.svgPath.substr(reaction.svgPath.length - 4) === '.svg' ? { svgPath: reaction.svgPath } : { svg: reaction.reaction_svg_file }; + return ( + ); } reactionHeader(reaction) { - let hasChanged = reaction.changed ? '' : 'none' + const hasChanged = reaction.changed ? '' : 'none'; const titleTooltip = `Created at: ${reaction.created_at} \n Updated at: ${reaction.updated_at}`; const { currentCollection } = UIStore.getState(); @@ -281,15 +319,16 @@ export default class ReactionDetails extends Component { ); - return (
{titleTooltip}}> -  {reaction.title()} +  {reaction.title()} - Save and Close Reaction}> + Save and Close Reaction} + > - Save Reaction}> - {copyBtn} @@ -335,59 +380,31 @@ export default class ReactionDetails extends Component { className="button-right" disabled={reaction.changed || reaction.isNew} title={(reaction.changed || reaction.isNew) ? - "Report can be generated after reaction is saved." - : "Generate report for this reaction"} + 'Report can be generated after reaction is saved.' + : 'Generate report for this reaction'} onClick={() => Utils.downloadFile({ - contents: "/api/v1/reports/docx?id=" + reaction.id, + contents: `/api/v1/reports/docx?id=${reaction.id}`, name: reaction.name })} > -
+
{colLabel} - +
- +
); } - handleSelect(key) { - UIActions.selectTab({tabKey: key, type: 'reaction'}); + handleSelect = (key) => { + UIActions.selectTab({ tabKey: key, type: 'reaction' }); this.setState({ activeTab: key - }) - } - - onTabPositionChanged(visible) { - this.setState({ visible }) - } - - updateReactionSvg() { - const { reaction } = this.state; - const materialsSvgPaths = { - starting_materials: reaction.starting_materials.map(material => material.svgPath), - reactants: reaction.reactants.map(material => material.svgPath), - products: reaction.products.map(material => [material.svgPath, material.equivalent]) - }; - - const solvents = reaction.solvents.map((s) => { - const name = s.preferred_label; - return name; - }).filter(s => s); - - let temperature = reaction.temperature_display; - if (/^[\-|\d]\d*\.{0,1}\d{0,2}$/.test(temperature)) { - temperature = `${temperature} ${reaction.temperature.valueUnit}`; - } - - ReactionSvgFetcher.fetchByMaterialsSvgPaths(materialsSvgPaths, temperature, solvents, reaction.duration, reaction.conditions).then((result) => { - reaction.reaction_svg_file = result.reaction_svg; - this.setState(reaction); }); - } + }; handleSegmentsChange(se) { const { reaction } = this.state; @@ -404,12 +421,12 @@ export default class ReactionDetails extends Component { const { visible } = this.state; const tabContentsMap = { scheme: ( - + this.handleReactionChange(reaction, options)} + onReactionChange={(r, options) => this.handleReactionChange(r, options)} onInputChange={(type, event) => this.handleInputChange(type, event)} - /> + /> ), properties: ( @@ -448,7 +465,7 @@ export default class ReactionDetails extends Component { const tabTitlesMap = { green_chemistry: 'Green Chemistry' - } + }; for (let j = 0; j < XTabs.count; j += 1) { if (XTabs[`on${j}`](reaction)) { @@ -478,19 +495,20 @@ export default class ReactionDetails extends Component { const activeTab = (this.state.activeTab !== 0 && this.state.activeTab) || visible[0]; return ( - + {this.reactionHeader(reaction)} - {this.reactionSVG(reaction)} + {this.reactionSVG()} - {this.state.sfn ? : null} - + {tabContents}
@@ -510,6 +528,6 @@ export default class ReactionDetails extends Component { } ReactionDetails.propTypes = { - reaction: PropTypes.object, - toggleFullScreen: PropTypes.func, -} + reaction: PropTypes.object.isRequired, + toggleFullScreen: PropTypes.func.isRequired, +}; diff --git a/app/packs/src/components/SampleDetails.js b/app/packs/src/components/SampleDetails.js index c746fd1cd2..3820fe5602 100644 --- a/app/packs/src/components/SampleDetails.js +++ b/app/packs/src/components/SampleDetails.js @@ -1,4 +1,3 @@ -/* eslint-disable react/forbid-prop-types */ /* eslint-disable no-param-reassign */ import React from 'react'; import PropTypes from 'prop-types'; @@ -6,7 +5,7 @@ import { Button, ButtonToolbar, InputGroup, FormGroup, FormControl, Panel, ListGroup, ListGroupItem, Glyphicon, Tabs, Tab, Row, Col, - Tooltip, OverlayTrigger, DropdownButton, MenuItem, + Tooltip, OverlayTrigger, ControlLabel, Modal, Alert, Checkbox } from 'react-bootstrap'; import SVG from 'react-inlinesvg'; @@ -18,7 +17,6 @@ import classNames from 'classnames'; import Immutable from 'immutable'; import ElementActions from './actions/ElementActions'; -import ElementStore from './stores/ElementStore'; import DetailActions from './actions/DetailActions'; import LoadingActions from './actions/LoadingActions'; @@ -40,7 +38,6 @@ import XTabs from './extra/SampleDetailsXTabs'; import StructureEditorModal from './structure_editor/StructureEditorModal'; import Sample from './models/Sample'; -import Container from './models/Container'; import PolymerSection from './PolymerSection'; import ElementalCompositionGroup from './ElementalCompositionGroup'; import ToggleSection from './common/ToggleSection'; @@ -49,13 +46,11 @@ import ClipboardCopyText from './common/ClipboardCopyText'; import SampleForm from './SampleForm'; import ComputedPropsContainer from './computed_props/ComputedPropsContainer'; import ComputedPropLabel from './computed_props/ComputedPropLabel'; -import Utils from './utils/Functions'; import PrintCodeButton from './common/PrintCodeButton'; import SampleDetailsLiteratures from './DetailsTabLiteratures'; import MoleculesFetcher from './fetchers/MoleculesFetcher'; import PubchemLcss from './PubchemLcss'; import QcMain from './qc/QcMain'; -import { chmoConversions } from './OlsComponent'; import ConfirmClose from './common/ConfirmClose'; import { EditUserLabels, ShowUserLabels } from './UserLabels'; import CopyElementModal from './common/CopyElementModal'; @@ -85,9 +80,13 @@ const decoupleCheck = (sample) => { }; const rangeCheck = (field, sample) => { - if (sample[`${field}_lowerbound`] && sample[`${field}_lowerbound`] !== '' - && sample[`${field}_upperbound`] && sample[`${field}_upperbound`] !== '' - && Number.parseFloat(sample[`${field}_upperbound`]) < Number.parseFloat(sample[`${field}_lowerbound`])) { + const lowerbound = sample[`${field}_lowerbound`]; + const upperbound = sample[`${field}_upperbound`]; + if (lowerbound + && lowerbound !== '' + && upperbound + && upperbound !== '' + && Number.parseFloat(upperbound) < Number.parseFloat(lowerbound)) { NotificationActions.add({ title: `Error on ${field.replace(/(^\w{1})|(_{1}\w{1})/g, match => match.toUpperCase())}`, message: 'range lower bound must be less than or equal to range upper', level: 'error', position: 'tc' }); @@ -97,6 +96,22 @@ const rangeCheck = (field, sample) => { return true; }; +const extraLabels = () => { + const labels = []; + for (let j = 0; j < XLabels.count; j += 1) { + labels.push(XLabels[`content${j}`]); + } + return labels; +}; + +const 
clipboardTooltip = () => ( + copy to clipboard +); + +const moleculeCreatorTooltip = () => ( + create molecule +); + export default class SampleDetails extends React.Component { constructor(props) { super(props); @@ -152,8 +167,8 @@ export default class SampleDetails extends React.Component { // eslint-disable-next-line camelcase UNSAFE_componentWillReceiveProps(nextProps) { - if (nextProps.sample.isNew && (typeof (nextProps.sample.molfile) === 'undefined' || ((nextProps.sample.molfile || '').length === 0)) - || (typeof (nextProps.sample.molfile) !== 'undefined' && nextProps.sample.molecule.inchikey == 'DUMMY')) { + if ((nextProps.sample.isNew && (typeof (nextProps.sample.molfile) === 'undefined' || ((nextProps.sample.molfile || '').length === 0))) + || (typeof (nextProps.sample.molfile) !== 'undefined' && nextProps.sample.molecule.inchikey === 'DUMMY')) { this.setState({ smileReadonly: false, }); @@ -182,6 +197,17 @@ export default class SampleDetails extends React.Component { } } + onCasSelectOpen(e, casArr) { + if (casArr.length === 0) { + this.setState({ isCasLoading: true }); + DetailActions.getMoleculeCas(this.state.sample); + } + } + + onTabPositionChanged(visible) { + this.setState({ visible }); + } + handleMolfileShow() { this.setState({ showMolfileModal: true @@ -214,7 +240,7 @@ export default class SampleDetails extends React.Component { }); } - showStructureEditor() { + showStructureEditor = () => { this.setState({ showStructureEditor: true }); @@ -300,7 +326,7 @@ export default class SampleDetails extends React.Component { } } - handleStructureEditorSave(molfile, svg_file = null, config = null, editor = 'ketcher') { + handleStructureEditorSave = (molfile, svg_file = null, config = null, editor = 'ketcher') => { const { sample } = this.state; sample.molfile = molfile; const smiles = (config && sample.molecule) ? config.smiles : null; @@ -316,6 +342,7 @@ export default class SampleDetails extends React.Component { sample, smileReadonly: true, pageMessage: result.ob_log, loadingMolecule: false }); }).catch((errorMessage) => { + // eslint-disable-next-line no-alert alert('Cannot create molecule!'); console.log(`handleStructureEditorSave exception of fetchByMolfile: ${errorMessage}`); }); @@ -323,6 +350,7 @@ export default class SampleDetails extends React.Component { MoleculesFetcher.fetchBySmi(smiles, svg_file, molfile, editor) .then((result) => { if (!result || result == null) { + // eslint-disable-next-line no-alert alert('Cannot create molecule!'); } else { sample.molecule = result; @@ -332,6 +360,7 @@ export default class SampleDetails extends React.Component { }); } }).catch((errorMessage) => { + // eslint-disable-next-line no-alert alert('Cannot create molecule!'); console.log(`handleStructureEditorSave exception of fetchBySmi: ${errorMessage}`); }); @@ -339,7 +368,7 @@ export default class SampleDetails extends React.Component { this.hideStructureEditor(); } - handleStructureEditorCancel() { + handleStructureEditorCancel = () => { this.hideStructureEditor(); } @@ -377,7 +406,7 @@ export default class SampleDetails extends React.Component { structureEditorButton(isDisabled) { return ( // eslint-disable-next-line react/jsx-no-bind - ); @@ -388,92 +417,43 @@ export default class SampleDetails extends React.Component { if (this.state.loadingMolecule) { svgPath = '/images/wild_card/loading-bubbles.svg'; } else { - svgPath = sample.svgPath; + ({ svgPath } = sample); } - let className = svgPath ? 'svg-container' : 'svg-container-empty' + const className = svgPath ? 
'svg-container' : 'svg-container-empty'; return ( sample.can_update - ?
- - + ? ( + - :
- + ) + : ( +
+
+ ) ); } - sampleAverageMW(sample) { - let mw = sample.molecule_molecular_weight; - if(mw) - return ; - else - return ''; + sampleAverageMW() { + const mw = this.state.sample.molecule_molecular_weight; + if (mw) { return ; } + return ''; } - sampleExactMW(sample) { - let mw = sample.molecule_exact_molecular_weight - if(mw) - return ; - else - return ''; + sampleExactMW() { + const mw = this.state.sample.molecule_exact_molecular_weight; + if (mw) { return ; } + return ''; } - initiateAnalysisButton(sample) { - return ( -
- - this.initiateAnalysisWithKind(sample, chmoConversions.nmr_1h.termId)}>{chmoConversions.nmr_1h.label} - this.initiateAnalysisWithKind(sample, chmoConversions.nmr_13c.termId)}>{chmoConversions.nmr_13c.label} - this.initiateAnalysisWithKind(sample, 'Others')}>others - this.initiateAnalysisWithKind(sample, 'Others2x')}>others 2x - this.initiateAnalysisWithKind(sample, 'Others3x')}>others 3x - -
- ); - } - - initiateAnalysisWithKind(sample, kind) { - let analysis = ''; - let a1 = Container.buildAnalysis(chmoConversions.others.value), - a2 = Container.buildAnalysis(chmoConversions.others.value), - a3 = Container.buildAnalysis(chmoConversions.others.value); - switch(kind) { - case chmoConversions.nmr_1h.termId: - analysis = Container.buildAnalysis(chmoConversions.nmr_1h.value); - sample.addAnalysis(analysis); - ElementActions.updateSample(sample); - Utils.downloadFile({contents: "/api/v1/code_logs/print_analyses_codes?sample_id=" + sample.id + "&analyses_ids[]=" + analysis.id + "&type=nmr_analysis&size=small"}) - break; - case chmoConversions.nmr_13c.termId: - analysis = Container.buildAnalysis(chmoConversions.nmr_13c.value); - sample.addAnalysis(analysis); - ElementActions.updateSample(sample); - Utils.downloadFile({ contents: "/api/v1/code_logs/print_analyses_codes?sample_id=" + sample.id + "&analyses_ids[]=" + analysis.id + "&type=nmr_analysis&size=small" }) - break; - case "Others": - sample.addAnalysis(a1); - ElementActions.updateSample(sample); - Utils.downloadFile({contents: "/api/v1/code_logs/print_analyses_codes?sample_id=" + sample.id + "&analyses_ids[]=" + a1.id + "&type=analysis&size=small"}) - break; - case "Others2x": - sample.addAnalysis(a1); - sample.addAnalysis(a2); - ElementActions.updateSample(sample); - Utils.downloadFile({contents: "/api/v1/code_logs/print_analyses_codes?sample_id=" + sample.id + "&analyses_ids[]=" + a1.id + "&analyses_ids[]=" + a2.id + "&type=analysis&size=small"}) - break; - case "Others3x": - sample.addAnalysis(a1); - sample.addAnalysis(a2); - sample.addAnalysis(a3); - ElementActions.updateSample(sample); - Utils.downloadFile({contents: "/api/v1/code_logs/print_analyses_codes?sample_id=" + sample.id + "&analyses_ids[]=" + a1.id + "&analyses_ids[]=" + a2.id + "&analyses_ids[]=" + a3.id + "&type=analysis&size=small"}) - break; - } - } - - sampleHeader(sample) { + sampleHeader() { + const { sample } = this.state; const saveBtnDisplay = sample.isEdited ? '' : 'none'; const titleTooltip = `Created at: ${sample.created_at} \n Updated at: ${sample.updated_at}`; @@ -557,37 +537,33 @@ export default class SampleDetails extends React.Component { {colLabel} - {this.extraLabels().map((Lab, i) => )} + { + // eslint-disable-next-line react/no-array-index-key + extraLabels().map((Lab, i) => ) + }
); } - transferToDeviceButton(sample) { - return ( - - ) - } - - sampleInfo(sample) { + sampleInfo() { + const { sample } = this.state; const style = { height: '200px' }; - let pubchemLcss = (sample.pubchem_tag && sample.pubchem_tag.pubchem_lcss && sample.pubchem_tag.pubchem_lcss.Record) || null; + let pubchemLcss = ( + sample.pubchem_tag && + sample.pubchem_tag.pubchem_lcss && + sample.pubchem_tag.pubchem_lcss.Record + ) || null; if (pubchemLcss && pubchemLcss.Reference) { const echa = pubchemLcss.Reference.filter(e => e.SourceName === 'European Chemicals Agency (ECHA)').map(e => e.ReferenceNumber); if (echa.length > 0) { - pubchemLcss = pubchemLcss.Section.find(e => e.TOCHeading === 'Safety and Hazards') || []; - pubchemLcss = pubchemLcss.Section.find(e => e.TOCHeading === 'Hazards Identification') || []; - pubchemLcss = pubchemLcss.Section[0].Information.filter(e => echa.includes(e.ReferenceNumber)) || null; + pubchemLcss = pubchemLcss.Section.find(e => + e.TOCHeading === 'Safety and Hazards') || []; + pubchemLcss = pubchemLcss.Section.find(e => + e.TOCHeading === 'Hazards Identification') || []; + pubchemLcss = pubchemLcss.Section[0].Information.filter(e => + echa.includes(e.ReferenceNumber)) || null; } else pubchemLcss = null; } const pubchemCid = sample.pubchem_tag && sample.pubchem_tag.pubchem_cid ? @@ -599,8 +575,8 @@ export default class SampleDetails extends React.Component {

-
{this.sampleAverageMW(sample)}
-
{this.sampleExactMW(sample)}
+
{this.sampleAverageMW()}
+
{this.sampleExactMW()}
{lcssSign} @@ -612,7 +588,7 @@ export default class SampleDetails extends React.Component { moleculeInchi(sample) { if (typeof (this.inchistringInput) !== 'undefined' && this.inchistringInput - && typeof (sample.molecule_inchistring) !== 'undefined' && sample.molecule_inchistring) { + && typeof (sample.molecule_inchistring) !== 'undefined' && sample.molecule_inchistring) { this.inchistringInput.value = sample.molecule_inchistring; } const inchiLabel = this.state.showInchikey ? 'InChIKey' : 'InChI'; @@ -642,7 +618,7 @@ export default class SampleDetails extends React.Component { /> - + @@ -652,22 +628,10 @@ export default class SampleDetails extends React.Component { ); } - clipboardTooltip() { - return( - copy to clipboard - ) - } - - moleculeCreatorTooltip() { - return( - create molecule - ) - } - moleculeCanoSmiles(sample) { if (this.state.smileReadonly && typeof (this.smilesInput) !== 'undefined' - && this.smilesInput && typeof (sample.molecule_cano_smiles) !== 'undefined' - && sample.molecule_cano_smiles) { + && this.smilesInput && typeof (sample.molecule_cano_smiles) !== 'undefined' + && sample.molecule_cano_smiles) { this.smilesInput.value = sample.molecule_cano_smiles; } return ( @@ -684,14 +648,14 @@ export default class SampleDetails extends React.Component { /> - + - + @@ -740,7 +704,7 @@ export default class SampleDetails extends React.Component { - ) + ); } addManualCas(e) { @@ -774,7 +738,7 @@ export default class SampleDetails extends React.Component { disabled={!sample.can_update} /> - + @@ -783,19 +747,11 @@ export default class SampleDetails extends React.Component { } updateCas(e) { - let sample = this.state.sample; + const { sample } = this.state; sample.xref = { ...sample.xref, cas: e }; this.setState({ sample }); } - onCasSelectOpen(e, casArr) { - if(casArr.length === 0) { - this.setState({isCasLoading: true}) - DetailActions.getMoleculeCas(this.state.sample) - } - } - - handleSegmentsChange(se) { const { sample } = this.state; const { segments } = sample; @@ -806,6 +762,7 @@ export default class SampleDetails extends React.Component { } customizableField() { + /* eslint-disable camelcase */ const { xref } = this.state.sample; const { cas, @@ -830,6 +787,7 @@ export default class SampleDetails extends React.Component { )) ); + /* eslint-enable camelcase */ } updateKey(key, e) { @@ -864,7 +822,7 @@ export default class SampleDetails extends React.Component { return ( this.handleElementalSectionToggle()}> - + {label}
@@ -924,7 +882,7 @@ export default class SampleDetails extends React.Component { this.handleChemIdentSectionToggle()}> Chemical identifiers - { sample.decoupled && + {sample.decoupled &&  [decoupled] @@ -969,7 +927,7 @@ export default class SampleDetails extends React.Component { const sample = this.state.sample || {}; return ( - + + {this.setState(sample)}} + setState={(updatedSample) => { this.setState(updatedSample); }} handleSampleChanged={this.handleSampleChanged} handleSubmit={this.handleSubmit} fromSample @@ -1022,7 +980,7 @@ export default class SampleDetails extends React.Component { } sampleImportReadoutTab(ind) { - let sample = this.state.sample || {}; + const sample = this.state.sample || {}; return ( ; const title = ( @@ -1136,17 +1094,9 @@ export default class SampleDetails extends React.Component { ); } - extraLabels() { - let labels = []; - for (let j = 0; j < XLabels.count; j += 1) { - labels.push(XLabels[`content${j}`]); - } - return labels; - } - sampleIsValid() { const { sample, loadingMolecule, quickCreator } = this.state; - return (sample.isValid && !loadingMolecule) || sample.is_scoped == true || quickCreator; + return (sample.isValid && !loadingMolecule) || sample.is_scoped === true || quickCreator; } saveBtn(sample, closeView = false) { @@ -1178,7 +1128,7 @@ export default class SampleDetails extends React.Component { const belongToReaction = sample.belongTo && sample.belongTo.type === 'reaction'; const hasAnalyses = !!(sample.analyses && sample.analyses.length > 0); const downloadAnalysesBtn = (sample.isNew || !hasAnalyses) ? null : ( - ); @@ -1196,16 +1146,17 @@ export default class SampleDetails extends React.Component { ); } - structureEditorModal(sample) { - const molfile = sample.molfile; + structureEditorModal() { + const { sample } = this.state; + const { molfile } = sample; const hasParent = sample && sample.parent_id; const hasChildren = sample && sample.children_count > 0; return ( '); return ( { this.molfileInputModal = m; }} defaultValue={this.molfileInput.value || ''} /> @@ -1263,10 +1212,6 @@ export default class SampleDetails extends React.Component { return (
); } - onTabPositionChanged(visible) { - this.setState({ visible }); - } - render() { const sample = this.state.sample || {}; const { visible } = this.state; @@ -1339,29 +1284,29 @@ export default class SampleDetails extends React.Component {
{ - pageMessage.error.map(m => ( -
{m}
- )) - } + pageMessage.error.map(m => ( +
{m}
+ )) + } { - pageMessage.warning.map(m => ( -
{m}
- )) - } + pageMessage.warning.map(m => ( +
{m}
+ )) + } ) : null; const activeTab = (this.state.activeTab !== 0 && stb.indexOf(this.state.activeTab) > -1 && - this.state.activeTab) || visible.get(0); + this.state.activeTab) || visible.get(0); return ( - {this.sampleHeader(sample)}{messageBlock} + {this.sampleHeader()}{messageBlock} - {this.sampleInfo(sample)} + {this.sampleInfo()} {this.sampleFooter()} - {this.structureEditorModal(sample)} + {this.structureEditorModal()} {this.renderMolfileModal()} - ) + ); } } SampleDetails.propTypes = { - sample: PropTypes.object, - toggleFullScreen: PropTypes.func, + sample: PropTypes.object.isRequired, + toggleFullScreen: PropTypes.func.isRequired, }; From 8813eaf918419068fce3022c3985df716b045936 Mon Sep 17 00:00:00 2001 From: Martin Schneider Date: Thu, 18 Nov 2021 08:44:45 +0100 Subject: [PATCH 3/3] Add a change history /w logidze Store a history which keeps track of every change made to certain entities and add a "History" tab to the Reaction/Sample detail pages. Related entities are displayed in the version history. E.g. Reaction -> ReactionsSample Save timestamp and author of the version with the changes. A change diff to the previous version can be displayed. Tracked entities: - Attachment - Container - ElementalComposition - Reaction - ReactionsSample - Residue - Sample Co-authored-by: VadimKeller --- Gemfile | 2 + Gemfile.lock | 6 + app/api/api.rb | 3 + app/api/chemotion/version_api.rb | 126 ++++++ app/api/modules/logidze_module.rb | 21 + app/assets/stylesheets/version.scss | 24 ++ app/models/attachment.rb | 6 + app/models/concerns/versionable.rb | 203 +++++++++ app/models/container.rb | 1 + app/models/elemental_composition.rb | 2 + app/models/reaction.rb | 1 + app/models/reactions_product_sample.rb | 2 + .../reactions_purification_solvent_sample.rb | 2 + app/models/reactions_reactant_sample.rb | 2 + app/models/reactions_sample.rb | 4 + app/models/reactions_solvent_sample.rb | 2 + .../reactions_starting_material_sample.rb | 2 + app/models/residue.rb | 1 + app/models/sample.rb | 1 + app/packs/src/components/ReactionDetails.js | 23 +- app/packs/src/components/SampleDetails.js | 29 +- app/packs/src/components/VersionsTable.js | 137 ++++++ .../src/components/VersionsTableChanges.js | 67 +++ app/packs/src/components/VersionsTableTime.js | 29 ++ .../components/fetchers/VersionsFetcher.js | 27 ++ app/packs/src/components/models/Change.js | 11 + app/packs/src/components/models/Version.js | 16 + config/initializers/logidze.rb | 24 ++ config/profile_default.yml.example | 8 + .../logidze_capture_exception_v01.sql | 23 + db/functions/logidze_compact_history_v01.sql | 38 ++ db/functions/logidze_filter_keys_v01.sql | 27 ++ db/functions/logidze_logger_v02.sql | 203 +++++++++ db/functions/logidze_snapshot_v03.sql | 33 ++ db/functions/logidze_version_v02.sql | 21 + db/migrate/20210928095129_logidze_install.rb | 64 +++ .../20210928095414_add_logidze_to_samples.rb | 24 ++ ...20210928095425_add_logidze_to_reactions.rb | 24 ++ ...309_add_timestamps_to_reactions_samples.rb | 7 + ...131722_add_logidze_to_reactions_samples.rb | 24 ++ .../20211111154822_add_logidze_to_residues.rb | 24 ++ ...6_add_logidze_to_elemental_compositions.rb | 24 ++ ...211112121704_add_logidze_to_attachments.rb | 24 ++ ...0211112121733_add_logidze_to_containers.rb | 24 ++ db/schema.rb | 398 ++++++++++++++++++ db/triggers/logidze_on_attachments_v01.sql | 6 + db/triggers/logidze_on_containers_v01.sql | 6 + .../logidze_on_elemental_compositions_v01.sql | 6 + .../logidze_on_reactions_samples_v01.sql | 6 + 
db/triggers/logidze_on_reactions_v01.sql | 6 + db/triggers/logidze_on_residues_v01.sql | 6 + db/triggers/logidze_on_samples_v01.sql | 6 + package.json | 1 + yarn.lock | 26 +- 54 files changed, 1825 insertions(+), 8 deletions(-) create mode 100644 app/api/chemotion/version_api.rb create mode 100644 app/api/modules/logidze_module.rb create mode 100644 app/assets/stylesheets/version.scss create mode 100644 app/models/concerns/versionable.rb create mode 100644 app/packs/src/components/VersionsTable.js create mode 100644 app/packs/src/components/VersionsTableChanges.js create mode 100644 app/packs/src/components/VersionsTableTime.js create mode 100644 app/packs/src/components/fetchers/VersionsFetcher.js create mode 100644 app/packs/src/components/models/Change.js create mode 100644 app/packs/src/components/models/Version.js create mode 100644 config/initializers/logidze.rb create mode 100644 db/functions/logidze_capture_exception_v01.sql create mode 100644 db/functions/logidze_compact_history_v01.sql create mode 100644 db/functions/logidze_filter_keys_v01.sql create mode 100644 db/functions/logidze_logger_v02.sql create mode 100644 db/functions/logidze_snapshot_v03.sql create mode 100644 db/functions/logidze_version_v02.sql create mode 100644 db/migrate/20210928095129_logidze_install.rb create mode 100644 db/migrate/20210928095414_add_logidze_to_samples.rb create mode 100644 db/migrate/20210928095425_add_logidze_to_reactions.rb create mode 100644 db/migrate/20211021120309_add_timestamps_to_reactions_samples.rb create mode 100644 db/migrate/20211021131722_add_logidze_to_reactions_samples.rb create mode 100644 db/migrate/20211111154822_add_logidze_to_residues.rb create mode 100644 db/migrate/20211111154836_add_logidze_to_elemental_compositions.rb create mode 100644 db/migrate/20211112121704_add_logidze_to_attachments.rb create mode 100644 db/migrate/20211112121733_add_logidze_to_containers.rb create mode 100644 db/triggers/logidze_on_attachments_v01.sql create mode 100644 db/triggers/logidze_on_containers_v01.sql create mode 100644 db/triggers/logidze_on_elemental_compositions_v01.sql create mode 100644 db/triggers/logidze_on_reactions_samples_v01.sql create mode 100644 db/triggers/logidze_on_reactions_v01.sql create mode 100644 db/triggers/logidze_on_residues_v01.sql create mode 100644 db/triggers/logidze_on_samples_v01.sql diff --git a/Gemfile b/Gemfile index 607f505ccf..54ec587109 100644 --- a/Gemfile +++ b/Gemfile @@ -111,6 +111,8 @@ gem 'whenever', require: false gem 'yaml_db' +gem 'logidze' + group :development do gem 'better_errors' # allows to debug exception on backend from browser diff --git a/Gemfile.lock b/Gemfile.lock index efb39c3ecb..c374ea1f87 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -429,6 +429,10 @@ GEM listen (3.3.1) rb-fsevent (~> 0.10, >= 0.10.3) rb-inotify (~> 0.9, >= 0.9.10) + logidze (1.2.0) + activerecord (>= 5.0) + railties (>= 5.0) + ruby-next-core (~> 0.9) loofah (2.17.0) crass (~> 1.0.2) nokogiri (>= 1.5.9) @@ -641,6 +645,7 @@ GEM activesupport memoist ruby-mailchecker (3.2.29) + ruby-next-core (0.13.1) ruby-ole (1.2.12.2) ruby-progressbar (1.10.1) ruby2_keywords (0.0.2) @@ -828,6 +833,7 @@ DEPENDENCIES ketcherails! 
launchy (~> 2.4.3) listen + logidze memory_profiler meta_request net-sftp diff --git a/app/api/api.rb b/app/api/api.rb index 1bfee02c21..b0b8e2c528 100644 --- a/app/api/api.rb +++ b/app/api/api.rb @@ -3,6 +3,8 @@ require 'grape-swagger' class API < Grape::API + include LogidzeModule + format :json prefix :api version 'v1' @@ -143,6 +145,7 @@ def to_json_camel_case(val) mount Chemotion::NmrdbAPI mount Chemotion::MeasurementsAPI mount Chemotion::ConverterAPI + mount Chemotion::VersionAPI add_swagger_documentation(info: { "title": "Chemotion ELN", diff --git a/app/api/chemotion/version_api.rb b/app/api/chemotion/version_api.rb new file mode 100644 index 0000000000..d9080ca990 --- /dev/null +++ b/app/api/chemotion/version_api.rb @@ -0,0 +1,126 @@ +require 'open-uri' + +module Chemotion + class VersionAPI < Grape::API + include Grape::Kaminari + helpers ParamsHelpers + + namespace :versions do + resource :samples do + desc 'Return versions of the given sample' + + params do + requires :id, type: Integer, desc: 'Sample id' + end + + paginate per_page: 10, offset: 0, max_per_page: 100 + + route_param :id do + get do + # find specific sample and load only required data + sample = Sample.select(:id, :name, :log_data, :updated_at).find(params[:id]) + + analyses = sample.analyses.flat_map { |analysis| analysis.self_and_descendants.select(:id, :name, :updated_at, :log_data) } + + # create cache key for sample + timestamp = [ + sample.updated_at, + analyses.map(&:updated_at).max, + Attachment.where(attachable_id: analyses.map(&:id), attachable_type: 'Container').maximum(:updated_at) + ].reject(&:nil?).max.to_i + cache_key = "versions/samples/#{sample.id}/#{timestamp}" + + # cache processed and sorted versions to speed up pagination + versions = Rails.cache.fetch cache_key do + all_versions = sample.versions_hash + all_versions += sample.residues.select(:sample_id, :log_data).flat_map do |residue| + residue.versions_hash(sample.name) + end + all_versions += sample.elemental_compositions.select(:sample_id, :log_data).flat_map do |elemental_composition| + elemental_composition.versions_hash(sample.name) + end + + analyses.each do |analysis| + all_versions += analysis.versions_hash + all_versions += analysis.attachments.select(:attachable_id, :attachable_type, :filename, :log_data).flat_map do |attachment| + attachment.versions_hash(attachment.filename) + end + end + + all_versions.sort_by! 
{ |version| -version['t'].to_i } # sort versions with the latest changes in the first place + .each_with_index { |record, index| record['v'] = all_versions.length - index } # adjust v to be uniq and in right order + end + + { versions: paginate(Kaminari.paginate_array(versions)) } + end + end + end + + resource :reactions do + desc 'Return versions of the given reaction' + + params do + requires :id, type: Integer, desc: 'Reaction id' + end + + paginate per_page: 10, offset: 0, max_per_page: 100 + + route_param :id do + get do + # find specific sample and load only required data + reaction = Reaction.select(:id, :name, :log_data, :updated_at).find(params[:id]) + + analyses = ( + reaction.analyses + + reaction.samples.includes(:container).pluck('containers.id').flat_map { |container_id| Container.analyses_for_root(container_id) } + ).flat_map { |analysis| analysis.self_and_descendants.select(:id, :name, :updated_at, :log_data) } + + # create cache key for reaction + timestamp = [ + reaction.updated_at, + reaction.samples.with_deleted.maximum(:updated_at), + reaction.reactions_samples.with_deleted.maximum(:updated_at), + analyses.map(&:updated_at).max, + Attachment.where(attachable_id: analyses.map(&:id), attachable_type: 'Container').maximum(:updated_at) + ].reject(&:nil?).max.to_i + cache_key = "versions/reactions/#{reaction.id}/#{timestamp}" + + # cache processed and sorted versions of all reaction dependent records and merge them into one list to speed up pagination + versions = Rails.cache.fetch cache_key do + all_versions = reaction.versions_hash + + analyses.each do |analysis| + all_versions += analysis.versions_hash + all_versions += analysis.attachments.select(:attachable_id, :attachable_type, :filename, :log_data).flat_map do |attachment| + attachment.versions_hash(attachment.filename) + end + end + + samples = reaction.samples.with_deleted.select('samples.id, samples.name, samples.log_data') + samples.each do |sample| + all_versions += sample.versions_hash + all_versions += sample.residues.select(:sample_id, :log_data).flat_map do |residue| + residue.versions_hash(sample.name) + end + all_versions += sample.elemental_compositions.select(:sample_id, :log_data).flat_map do |elemental_composition| + elemental_composition.versions_hash(sample.name) + end + end + + reactions_samples = reaction.reactions_samples.with_deleted.select(:sample_id, :log_data, :type) + all_versions += reactions_samples.flat_map do |reactions_sample| + sample = samples.detect { |s| s.id == reactions_sample.sample_id } + reactions_sample.versions_hash(sample.name) + end + + all_versions.sort_by! { |version| -version['t'].to_i } # sort versions with the latest changes in the first place + .each_with_index { |record, index| record['v'] = all_versions.length - index } # adjust v to be uniq and in right order + end + + { versions: paginate(Kaminari.paginate_array(versions)) } + end + end + end + end + end +end diff --git a/app/api/modules/logidze_module.rb b/app/api/modules/logidze_module.rb new file mode 100644 index 0000000000..b590a87eab --- /dev/null +++ b/app/api/modules/logidze_module.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +# A helper to help logidize track current_user +module LogidzeModule + extend ActiveSupport::Concern + + included do + before do + if current_user.present? && request.request_method.in?(%w[PATCH POST PUT DELETE]) + @logidze_meta_set ||= begin + Logidze.with_responsible!(current_user.id) + true + end + end + end + + after do + Logidze.clear_responsible! 
if @logidze_meta_set.present? + end + end +end \ No newline at end of file diff --git a/app/assets/stylesheets/version.scss b/app/assets/stylesheets/version.scss new file mode 100644 index 0000000000..65cef6b42d --- /dev/null +++ b/app/assets/stylesheets/version.scss @@ -0,0 +1,24 @@ +.row.row-version-history { // overwrite bootstrap + display: flex; + flex-wrap: wrap; + font-size: 10px; + margin: 0; + word-break: break-all; + + & + .row-version-history { + margin-top: 15px; + } + + & > [class*="col-"] { + padding: 10px; + } + + .ql-editor { + padding: 0; + } + + .ql-tooltip.ql-hidden { + height: 0; + padding-top: 10px; + } +} \ No newline at end of file diff --git a/app/models/attachment.rb b/app/models/attachment.rb index 3854be2faf..3f9a8d2c6d 100644 --- a/app/models/attachment.rb +++ b/app/models/attachment.rb @@ -30,6 +30,8 @@ class Attachment < ApplicationRecord + include Versionable + include AttachmentJcampAasm include AttachmentJcampProcess include AttachmentConverter @@ -52,6 +54,10 @@ class Attachment < ApplicationRecord belongs_to :attachable, polymorphic: true, optional: true has_one :report_template +<<<<<<< HEAD +======= + +>>>>>>> Add a change history /w logidze scope :where_research_plan, lambda { |c_id| where(attachable_id: c_id, attachable_type: 'ResearchPlan') diff --git a/app/models/concerns/versionable.rb b/app/models/concerns/versionable.rb new file mode 100644 index 0000000000..2c3e797788 --- /dev/null +++ b/app/models/concerns/versionable.rb @@ -0,0 +1,203 @@ +# frozen_string_literal: true + +# Versionable module +module Versionable + extend ActiveSupport::Concern + + BLACKLISTED_ATTRIBUTES = %w[ + id + created_at + updated_at + parent_id + parent_type + container_type + attachable_id + attachable_type + sample_id + reaction_id + molecule_id + molecule_name_id + type + ].freeze + + included do + has_logidze + end + + def versions_hash(record_name = name) + return [] if log_data.nil? + + result = [] # result data + base = {} # track current version data + log_data.versions.each do |version| + changes = version.changes # get changes for current version + changes_comparison_hash = {} # hash for changes comparison + changes.each do |key, value| + next if key.in?(BLACKLISTED_ATTRIBUTES) # ignore uneeded keys + next if value == base[key] # ignore if value is same as in last version + next if base[key].blank? && value.blank? # ignore if value is empty or nil + + # parse value if needed + old_value = version_value(key, base[key]) + new_value = version_value(key, value) + + if old_value.is_a?(Hash) || new_value.is_a?(Hash) + # fix nil cases + old_value ||= {} + new_value ||= {} + base = old_value.merge(new_value) # hash with contains all keys + label = version_label(key, base) # labels for hash + kind = version_kind(key, base) # kinds of hash (numrange, date, string) + + base.each_key do |key| + next if old_value[key] == new_value[key] # ignore if value is same as in last version + next if old_value[key].blank? && new_value[key].blank? # ignore if value is empty or nil + + changes_comparison_hash[key] = { + o: old_value[key], + n: new_value[key], + l: label[key], + k: kind[key] + } + end + else + changes_comparison_hash[key] = { + o: old_value, + n: new_value, + l: version_label(key), # label for attribute, + k: version_kind(key) # kind of attribute (numrange, date, string) + } + end + end + base.merge!(changes) # merge changes with last version data for next iteration + next if changes_comparison_hash.empty? 
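+      # Illustrative example (hypothetical values) of a single entry pushed below:
+      #   { 'k' => 'Sample', 'n' => 'Benzene', 't' => Time.at(version.data['ts'] / 1000),
+      #     'u' => 'Jane Doe',
+      #     'c' => { 'name' => { o: 'benzol', n: 'Benzene', l: 'Name', k: :string } } }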
+ + result << { + 'k' => version_entity, # record kind (sampe, reaction, ...) + 'n' => record_name, # record name (uses as default the name attribute but in case the model doesn't have a name field or you want to change it) + 't' => Time.at(version.data['ts'] / 1000), # timestamp of the change + 'u' => version_user_names_lookup[version.data.dig('m', '_r')], # user + 'c' => changes_comparison_hash # changes hash + } + end + + result + end + + private + + def version_user_names_lookup + @version_user_names_lookup ||= begin + ids = {} + + log_data.versions.each do |v| + ids[v.data.dig('m', '_r')] ||= 1 + ids[v.changes['created_by']] ||= 1 if v.changes.key?('created_by') + ids[v.changes['created_for']] ||= 1 if v.changes.key?('created_for') + end + + User.with_deleted.where(id: ids.keys).map { |u| [u.id, u.name] }.to_h + end + end + + def version_value(attribute, value) + return if value.nil? + + if self.class.name == 'Reaction' && attribute.in?(%w[description observation]) + YAML.load(value).to_json + elsif attribute.in?(%w[created_by created_for]) + version_user_names_lookup[value] + elsif self.class.name == 'Attachment' && attribute == 'aasm_state' + value.humanize + elsif self.class.name == 'ElementalComposition' && attribute == 'composition_type' + ElementalComposition::TYPES[value.to_sym] + elsif self.class.name == 'Sample' && attribute.in?(%w[boiling_point melting_point]) + value + else + @attributes[attribute].type.deserialize(value) + end + end + + def version_label(attribute, value_hash = {}) + case attribute + when 'timestamp_start' + 'Start' + when 'timestamp_stop' + 'Stop' + when 'observation' + 'Additional information for publication and purification details' + else + if self.class.columns_hash[attribute].type.in?(%i[hstore jsonb]) + label_hash = {} + + value_hash.each_key do |key| + value = if self.class.name == 'Container' && key == 'report' + 'Add to Report' + elsif self.class.name == 'Sample' && attribute == 'stereo' + "#{attribute} #{key}".humanize + else + key.underscore.humanize + end + + label_hash.merge!(key => value) + end + + label_hash + else + attribute.underscore.humanize + end + end + end + + def version_kind(attribute, value_hash = {}) + if value_hash.present? 
+ kind_hash = {} + + value_hash.each_key do |key| + value = if self.class.name == 'Container' && key == 'content' || self.class.name == 'Reaction' && key == 'ops' + :quill + elsif self.class.name == 'Container' && key == 'kind' + :treeselect + else + :string + end + + kind_hash.merge!(key => value) + end + + kind_hash + elsif self.class.name == 'Reaction' && attribute.in?(%w[description observation]) + :quill + elsif self.class.name == 'Reaction' && attribute == 'rxno' + :treeselect + else + case attribute + when 'created_at', 'updated_at', 'deleted_at' + :date + when 'melting_point', 'boiling_point' + :numrange + else + :string + end + end + end + + def version_entity + case self.class.name + when 'ReactionsStartingMaterialSample' + 'Starting material' + when 'ReactionsReactantSample' + 'Reactant' + when 'ReactionsSolventSample' + 'Solvent' + when 'ReactionsPurificationSolventSample' + 'Purification solvent' + when 'ReactionsProductSample' + 'Product' + when 'Container' + 'Analysis' + else + self.class.name.underscore.humanize + end + end +end \ No newline at end of file diff --git a/app/models/container.rb b/app/models/container.rb index e300d393a1..b8e7eae475 100644 --- a/app/models/container.rb +++ b/app/models/container.rb @@ -22,6 +22,7 @@ class Container < ApplicationRecord include ElementCodes include Datasetable + include Versionable belongs_to :containable, polymorphic: true, optional: true has_many :attachments, as: :attachable diff --git a/app/models/elemental_composition.rb b/app/models/elemental_composition.rb index e0c497a15a..b798a40a4b 100644 --- a/app/models/elemental_composition.rb +++ b/app/models/elemental_composition.rb @@ -16,6 +16,8 @@ # class ElementalComposition < ApplicationRecord + include Versionable + belongs_to :sample TYPES = { diff --git a/app/models/reaction.rb b/app/models/reaction.rb index 72f036ecba..d864effec8 100644 --- a/app/models/reaction.rb +++ b/app/models/reaction.rb @@ -50,6 +50,7 @@ class Reaction < ApplicationRecord include Taggable include ReactionRinchi include Segmentable + include Versionable serialize :description, Hash serialize :observation, Hash diff --git a/app/models/reactions_product_sample.rb b/app/models/reactions_product_sample.rb index 50d1beebf5..dbef44391d 100644 --- a/app/models/reactions_product_sample.rb +++ b/app/models/reactions_product_sample.rb @@ -13,6 +13,8 @@ # waste :boolean default(FALSE) # coefficient :float default(1.0) # show_label :boolean default(FALSE), not null +# created_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null +# updated_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null # # Indexes # diff --git a/app/models/reactions_purification_solvent_sample.rb b/app/models/reactions_purification_solvent_sample.rb index aa2c1c9faf..6daf74ccdb 100644 --- a/app/models/reactions_purification_solvent_sample.rb +++ b/app/models/reactions_purification_solvent_sample.rb @@ -13,6 +13,8 @@ # waste :boolean default(FALSE) # coefficient :float default(1.0) # show_label :boolean default(FALSE), not null +# created_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null +# updated_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null # # Indexes # diff --git a/app/models/reactions_reactant_sample.rb b/app/models/reactions_reactant_sample.rb index 958da1b44c..5193d0d063 100644 --- a/app/models/reactions_reactant_sample.rb +++ b/app/models/reactions_reactant_sample.rb @@ -13,6 +13,8 @@ # waste :boolean default(FALSE) # coefficient :float default(1.0) # 
show_label :boolean default(FALSE), not null +# created_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null +# updated_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null # # Indexes # diff --git a/app/models/reactions_sample.rb b/app/models/reactions_sample.rb index c3c3e64b0b..4399bec5ee 100644 --- a/app/models/reactions_sample.rb +++ b/app/models/reactions_sample.rb @@ -13,6 +13,8 @@ # waste :boolean default(FALSE) # coefficient :float default(1.0) # show_label :boolean default(FALSE), not null +# created_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null +# updated_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null # # Indexes # @@ -21,6 +23,8 @@ # class ReactionsSample < ApplicationRecord + include Versionable + acts_as_paranoid belongs_to :reaction, optional: true belongs_to :sample, optional: true diff --git a/app/models/reactions_solvent_sample.rb b/app/models/reactions_solvent_sample.rb index 947359f815..4f72e05580 100644 --- a/app/models/reactions_solvent_sample.rb +++ b/app/models/reactions_solvent_sample.rb @@ -13,6 +13,8 @@ # waste :boolean default(FALSE) # coefficient :float default(1.0) # show_label :boolean default(FALSE), not null +# created_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null +# updated_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null # # Indexes # diff --git a/app/models/reactions_starting_material_sample.rb b/app/models/reactions_starting_material_sample.rb index cc73df3429..4eef4c7b8c 100644 --- a/app/models/reactions_starting_material_sample.rb +++ b/app/models/reactions_starting_material_sample.rb @@ -13,6 +13,8 @@ # waste :boolean default(FALSE) # coefficient :float default(1.0) # show_label :boolean default(FALSE), not null +# created_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null +# updated_at :datetime default(Fri, 01 Oct 2021 00:00:00 UTC +00:00), not null # # Indexes # diff --git a/app/models/residue.rb b/app/models/residue.rb index 8728d1d23d..8ca53b4a53 100644 --- a/app/models/residue.rb +++ b/app/models/residue.rb @@ -15,6 +15,7 @@ # class Residue < ApplicationRecord + include Versionable belongs_to :sample, optional: true validate :loading_present diff --git a/app/models/sample.rb b/app/models/sample.rb index 70da7a24ec..2f9787b731 100644 --- a/app/models/sample.rb +++ b/app/models/sample.rb @@ -62,6 +62,7 @@ class Sample < ApplicationRecord include UnitConvertable include Taggable include Segmentable + include Versionable STEREO_ABS = ['any', 'rac', 'meso', 'delta', 'lambda', '(S)', '(R)', '(Sp)', '(Rp)', '(Sa)', '(Ra)'].freeze STEREO_REL = ['any', 'syn', 'anti', 'p-geminal', 'p-ortho', 'p-meta', 'p-para', 'cis', 'trans', 'fac', 'mer'].freeze diff --git a/app/packs/src/components/ReactionDetails.js b/app/packs/src/components/ReactionDetails.js index e9ff798681..4f603f66ec 100644 --- a/app/packs/src/components/ReactionDetails.js +++ b/app/packs/src/components/ReactionDetails.js @@ -19,6 +19,7 @@ import ReactionDetailsContainers from './ReactionDetailsContainers'; import SampleDetailsContainers from './SampleDetailsContainers'; import ReactionDetailsScheme from './ReactionDetailsScheme'; import ReactionDetailsProperties from './ReactionDetailsProperties'; +import VersionsTable from './VersionsTable'; import GreenChemistry from './green_chem/GreenChemistry'; import Utils from './utils/Functions'; import PrintCodeButton from './common/PrintCodeButton'; @@ -99,9 +100,9 @@ export default class ReactionDetails extends 
Component { const nextReaction = nextProps.reaction; if (nextReaction.id !== reaction.id || - nextReaction.updated_at !== reaction.updated_at || - nextReaction.reaction_svg_file !== reaction.reaction_svg_file || - nextReaction.changed || nextReaction.editedSample) { + nextReaction.updated_at !== reaction.updated_at || + nextReaction.reaction_svg_file !== reaction.reaction_svg_file || + nextReaction.changed || nextReaction.editedSample) { this.setState(prevState => ({ ...prevState, reaction: nextReaction })); } } @@ -460,6 +461,14 @@ export default class ReactionDetails extends Component { onReactionChange={this.handleReactionChange} /> + ), + history: ( + + + ) }; @@ -508,7 +517,13 @@ export default class ReactionDetails extends Component { tabTitles={tabTitlesMap} onTabPositionChanged={this.onTabPositionChanged} /> - + {tabContents}
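
For reference, the new `history` entry added to `tabContentsMap` above presumably just
renders the VersionsTable component introduced later in this patch. A minimal sketch,
assuming the same react-bootstrap Tab wrapper as the other tab entries (prop names follow
VersionsTable.propTypes; the exact markup is an assumption):

    history: (
      <Tab eventKey="history" title="History">
        <VersionsTable type="reactions" id={reaction.id} />
      </Tab>
    ),
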
diff --git a/app/packs/src/components/SampleDetails.js b/app/packs/src/components/SampleDetails.js index 3820fe5602..398f9f1dc1 100644 --- a/app/packs/src/components/SampleDetails.js +++ b/app/packs/src/components/SampleDetails.js @@ -51,6 +51,7 @@ import SampleDetailsLiteratures from './DetailsTabLiteratures'; import MoleculesFetcher from './fetchers/MoleculesFetcher'; import PubchemLcss from './PubchemLcss'; import QcMain from './qc/QcMain'; +import VersionsTable from './VersionsTable'; import ConfirmClose from './common/ConfirmClose'; import { EditUserLabels, ShowUserLabels } from './UserLabels'; import CopyElementModal from './common/CopyElementModal'; @@ -1076,6 +1077,22 @@ export default class SampleDetails extends React.Component { ); } + historyTab(ind) { + const { sample } = this.state; + if (!sample) { return null; } + return ( + + + + + + ); + } + nmrSimTab(ind) { const { sample } = this.state; if (!sample) { return null; } @@ -1221,7 +1238,8 @@ export default class SampleDetails extends React.Component { references: this.sampleLiteratureTab(), results: this.sampleImportReadoutTab('results'), qc_curation: this.qualityCheckTab('qc_curation'), - measurements: this.measurementsTab('measurements') + measurements: this.measurementsTab('measurements'), + history: this.historyTab('history'), }; if (this.enableComputedProps) { @@ -1314,8 +1332,13 @@ export default class SampleDetails extends React.Component { tabTitles={tabTitlesMap} onTabPositionChanged={this.onTabPositionChanged} /> - {this.state.sfn ? : null} - + {tabContents} diff --git a/app/packs/src/components/VersionsTable.js b/app/packs/src/components/VersionsTable.js new file mode 100644 index 0000000000..8e4cadbce7 --- /dev/null +++ b/app/packs/src/components/VersionsTable.js @@ -0,0 +1,137 @@ +/* eslint-disable react/forbid-prop-types */ +import React, { Component } from 'react'; +import PropTypes from 'prop-types'; +import { Pager } from 'react-bootstrap'; +import BootstrapTable from 'react-bootstrap-table-next'; +import VersionsFetcher from './fetchers/VersionsFetcher'; +import VersionsTableTime from './VersionsTableTime'; +import VersionsTableChanges from './VersionsTableChanges'; + +export default class VersionsTable extends Component { + constructor(props) { + super(props); + + this.state = { + versions: [], + page: 1, + pages: 1, + }; + } + + componentDidMount() { + this.fetchVersions(); + } + + handlePagerClick = (eventKey) => { + if (eventKey === 'prev') { + this.setState(state => ({ + page: state.page + 1 + }), this.fetchVersions); + } else { + this.setState(state => ({ + page: state.page - 1 + }), this.fetchVersions); + } + } + + fetchVersions() { + const { type, id } = this.props; + const { page } = this.state; + + VersionsFetcher.fetch({ + type, id, page + }).then((result) => { + if (!result) return false; + + return this.setState({ + versions: result.elements || [], + page: result.page || 1, + pages: result.pages || 1, + }); + }); + } + + render() { + const { versions } = this.state; + + const pagination = () => ( + + this.handlePagerClick('prev')} + disabled={this.state.page >= this.state.pages} + > + ← Previous Page + + this.handlePagerClick('next')} + disabled={this.state.page <= 1} + > + Next Page → + + + ); + + const columns = [ + { + dataField: 'id', + text: '#', + }, + { + dataField: 'createdAt', + text: 'Created', + formatter: cell => ( + + ), + }, + { + dataField: 'klass', + text: 'Entity', + }, + { + dataField: 'name', + text: 'Name', + }, + { + dataField: 'userName', + text: 'Author', + }, + ]; + + 
const expandRow = { + onlyOneExpanding: true, + parentClassName: 'active', + renderer: row => ( + + ), + }; + + return ( + <> + + + {pagination()} + + ); + } +} + +VersionsTable.propTypes = { + type: PropTypes.string.isRequired, + id: PropTypes.number.isRequired, +}; diff --git a/app/packs/src/components/VersionsTableChanges.js b/app/packs/src/components/VersionsTableChanges.js new file mode 100644 index 0000000000..831ccb812a --- /dev/null +++ b/app/packs/src/components/VersionsTableChanges.js @@ -0,0 +1,67 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import { Row, Col } from 'react-bootstrap'; +import moment from 'moment'; +import QuillViewer from './QuillViewer'; + +const VersionsTableChanges = (props) => { + const { changes } = props; + + const date = input => ( + input ? moment(input).format('YYYY-MM-DD HH:mm') : '' + ); + + const quill = input => ( + input ? : '' + ); + + const numrange = input => ( + input ? input.slice(1, -1).split(',', 1) : '' + ); + + const treeselect = input => ( + (input || '').split(' | ', 2)[1] || input + ); + + const formatValue = (kind, value) => { + const formatters = { + date, + quill, + numrange, + treeselect, + string: () => value, + }; + + return ( + formatters[kind] || formatters.string + )(value); + }; + + return ( + <> + { + changes.map(({ + name, label, kind, oldValue, newValue + }) => ( + + + {label} + + + {formatValue(kind, oldValue)} + + + {formatValue(kind, newValue)} + + + )) + } + + ); +}; + +VersionsTableChanges.propTypes = { + changes: PropTypes.arrayOf(PropTypes.object).isRequired, +}; + +export default VersionsTableChanges; diff --git a/app/packs/src/components/VersionsTableTime.js b/app/packs/src/components/VersionsTableTime.js new file mode 100644 index 0000000000..7674b6b51d --- /dev/null +++ b/app/packs/src/components/VersionsTableTime.js @@ -0,0 +1,29 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import { OverlayTrigger, Tooltip } from 'react-bootstrap'; +import moment from 'moment'; + +const VersionsTableTime = (props) => { + const { dateTime } = props; + + const renderTooltip = () => ( + + {moment(dateTime).format('YYYY-MM-DD HH:mm')} + + ); + + return ( + + {moment(dateTime).fromNow()} + + ); +}; + +VersionsTableTime.propTypes = { + dateTime: PropTypes.instanceOf(Date).isRequired, +}; + +export default VersionsTableTime; diff --git a/app/packs/src/components/fetchers/VersionsFetcher.js b/app/packs/src/components/fetchers/VersionsFetcher.js new file mode 100644 index 0000000000..18aaec6a26 --- /dev/null +++ b/app/packs/src/components/fetchers/VersionsFetcher.js @@ -0,0 +1,27 @@ +import 'whatwg-fetch'; +import Version from '../models/Version'; + +export default class VersionsFetcher { + static fetch({ + type, id, page, perPage + }) { + const url = new URL(`${window.location.origin}/api/v1/versions/${type}/${id}`); + url.search = new URLSearchParams({ + page: page || 1, + per_page: perPage || 10, + }); + + + return fetch(url.href, { + credentials: 'same-origin' + }).then(response => ( + response.json().then(json => ({ + elements: json.versions.map(v => (new Version(v))), + totalElements: parseInt(response.headers.get('X-Total'), 10), + page: parseInt(response.headers.get('X-Page'), 10), + pages: parseInt(response.headers.get('X-Total-Pages'), 10), + perPage: parseInt(response.headers.get('X-Per-Page'), 10) + })) + )).catch((errorMessage) => { console.log(errorMessage); }); + } +} diff --git a/app/packs/src/components/models/Change.js b/app/packs/src/components/models/Change.js new file 
mode 100644 index 0000000000..ea5ae603ac --- /dev/null +++ b/app/packs/src/components/models/Change.js @@ -0,0 +1,11 @@ +export default class Change { + constructor([name, { + l, k, o, n, + }]) { + this.name = name; + this.label = l; + this.kind = k; + this.oldValue = o; + this.newValue = n; + } +} diff --git a/app/packs/src/components/models/Version.js b/app/packs/src/components/models/Version.js new file mode 100644 index 0000000000..c9c0c30759 --- /dev/null +++ b/app/packs/src/components/models/Version.js @@ -0,0 +1,16 @@ +import Change from '../models/Change'; + +export default class Version { + constructor({ + v, k, n, t, u, c, + }) { + const changes = Object.entries(c).map(change => (new Change(change))); + + this.id = v; + this.klass = k; + this.name = n; + this.createdAt = new Date(t); + this.userName = u; + this.changes = changes; + } +} diff --git a/config/initializers/logidze.rb b/config/initializers/logidze.rb new file mode 100644 index 0000000000..417954c662 --- /dev/null +++ b/config/initializers/logidze.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +Logidze.ignore_log_data_by_default = true + +module Logidze + module Meta + def with_responsible!(responsible_id) + return if responsible_id.nil? + + meta = { Logidze::History::Version::META_RESPONSIBLE => responsible_id } + PermanentMetaWithTransaction.wrap_with(meta, &proc {}) + end + + def clear_responsible! + PermanentMetaWithTransaction.wrap_with({}, &proc {}) + end + + class PermanentMetaWithTransaction < MetaWithoutTransaction + private + + def pg_clear_meta_param; end + end + end +end diff --git a/config/profile_default.yml.example b/config/profile_default.yml.example index 5320c14776..ad59af0241 100644 --- a/config/profile_default.yml.example +++ b/config/profile_default.yml.example @@ -26,6 +26,8 @@ development: 4 :results: 5 + :history: + 6 :layout_detail_reaction: :scheme: 1 @@ -37,6 +39,8 @@ development: 4 :green_chemistry: 5 + :history: + 6 :layout_detail_wellplate: :properties: 1 @@ -80,6 +84,8 @@ production: 4 :results: 5 + :history: + 6 :layout_detail_reaction: :scheme: 1 @@ -91,6 +97,8 @@ production: 4 :green_chemistry: 5 + :history: + 6 :layout_detail_wellplate: :properties: 1 diff --git a/db/functions/logidze_capture_exception_v01.sql b/db/functions/logidze_capture_exception_v01.sql new file mode 100644 index 0000000000..cd05f07194 --- /dev/null +++ b/db/functions/logidze_capture_exception_v01.sql @@ -0,0 +1,23 @@ +CREATE OR REPLACE FUNCTION logidze_capture_exception(error_data jsonb) RETURNS boolean AS $body$ + -- version: 1 +BEGIN + -- Feel free to change this function to change Logidze behavior on exception. + -- + -- Return `false` to raise exception or `true` to commit record changes. 
+ -- + -- `error_data` contains: + -- - returned_sqlstate + -- - message_text + -- - pg_exception_detail + -- - pg_exception_hint + -- - pg_exception_context + -- - schema_name + -- - table_name + -- Learn more about available keys: + -- https://www.postgresql.org/docs/9.6/plpgsql-control-structures.html#PLPGSQL-EXCEPTION-DIAGNOSTICS-VALUES + -- + + return false; +END; +$body$ +LANGUAGE plpgsql; diff --git a/db/functions/logidze_compact_history_v01.sql b/db/functions/logidze_compact_history_v01.sql new file mode 100644 index 0000000000..f4890b20cc --- /dev/null +++ b/db/functions/logidze_compact_history_v01.sql @@ -0,0 +1,38 @@ +CREATE OR REPLACE FUNCTION logidze_compact_history(log_data jsonb, cutoff integer DEFAULT 1) RETURNS jsonb AS $body$ + -- version: 1 + DECLARE + merged jsonb; + BEGIN + LOOP + merged := jsonb_build_object( + 'ts', + log_data#>'{h,1,ts}', + 'v', + log_data#>'{h,1,v}', + 'c', + (log_data#>'{h,0,c}') || (log_data#>'{h,1,c}') + ); + + IF (log_data#>'{h,1}' ? 'm') THEN + merged := jsonb_set(merged, ARRAY['m'], log_data#>'{h,1,m}'); + END IF; + + log_data := jsonb_set( + log_data, + '{h}', + jsonb_set( + log_data->'h', + '{1}', + merged + ) - 0 + ); + + cutoff := cutoff - 1; + + EXIT WHEN cutoff <= 0; + END LOOP; + + return log_data; + END; +$body$ +LANGUAGE plpgsql; diff --git a/db/functions/logidze_filter_keys_v01.sql b/db/functions/logidze_filter_keys_v01.sql new file mode 100644 index 0000000000..877119b361 --- /dev/null +++ b/db/functions/logidze_filter_keys_v01.sql @@ -0,0 +1,27 @@ +CREATE OR REPLACE FUNCTION logidze_filter_keys(obj jsonb, keys text[], include_columns boolean DEFAULT false) RETURNS jsonb AS $body$ + -- version: 1 + DECLARE + res jsonb; + key text; + BEGIN + res := '{}'; + + IF include_columns THEN + FOREACH key IN ARRAY keys + LOOP + IF obj ? 
key THEN + res = jsonb_insert(res, ARRAY[key], obj->key); + END IF; + END LOOP; + ELSE + res = obj; + FOREACH key IN ARRAY keys + LOOP + res = res - key; + END LOOP; + END IF; + + RETURN res; + END; +$body$ +LANGUAGE plpgsql; diff --git a/db/functions/logidze_logger_v02.sql b/db/functions/logidze_logger_v02.sql new file mode 100644 index 0000000000..0ce1d932d9 --- /dev/null +++ b/db/functions/logidze_logger_v02.sql @@ -0,0 +1,203 @@ +CREATE OR REPLACE FUNCTION logidze_logger() RETURNS TRIGGER AS $body$ + -- version: 2 + DECLARE + changes jsonb; + version jsonb; + snapshot jsonb; + new_v integer; + size integer; + history_limit integer; + debounce_time integer; + current_version integer; + k text; + iterator integer; + item record; + columns text[]; + include_columns boolean; + ts timestamp with time zone; + ts_column text; + err_sqlstate text; + err_message text; + err_detail text; + err_hint text; + err_context text; + err_table_name text; + err_schema_name text; + err_jsonb jsonb; + err_captured boolean; + BEGIN + ts_column := NULLIF(TG_ARGV[1], 'null'); + columns := NULLIF(TG_ARGV[2], 'null'); + include_columns := NULLIF(TG_ARGV[3], 'null'); + + IF TG_OP = 'INSERT' THEN + IF columns IS NOT NULL THEN + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns, include_columns); + ELSE + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column); + END IF; + + IF snapshot#>>'{h, -1, c}' != '{}' THEN + NEW.log_data := snapshot; + END IF; + + ELSIF TG_OP = 'UPDATE' THEN + + IF OLD.log_data is NULL OR OLD.log_data = '{}'::jsonb THEN + IF columns IS NOT NULL THEN + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns, include_columns); + ELSE + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column); + END IF; + + IF snapshot#>>'{h, -1, c}' != '{}' THEN + NEW.log_data := snapshot; + END IF; + RETURN NEW; + END IF; + + history_limit := NULLIF(TG_ARGV[0], 'null'); + debounce_time := NULLIF(TG_ARGV[4], 'null'); + + current_version := (NEW.log_data->>'v')::int; + + IF ts_column IS NULL THEN + ts := statement_timestamp(); + ELSE + ts := (to_jsonb(NEW.*)->>ts_column)::timestamp with time zone; + IF ts IS NULL OR ts = (to_jsonb(OLD.*)->>ts_column)::timestamp with time zone THEN + ts := statement_timestamp(); + END IF; + END IF; + + IF NEW = OLD THEN + RETURN NEW; + END IF; + + IF current_version < (NEW.log_data#>>'{h,-1,v}')::int THEN + iterator := 0; + FOR item in SELECT * FROM jsonb_array_elements(NEW.log_data->'h') + LOOP + IF (item.value->>'v')::int > current_version THEN + NEW.log_data := jsonb_set( + NEW.log_data, + '{h}', + (NEW.log_data->'h') - iterator + ); + END IF; + iterator := iterator + 1; + END LOOP; + END IF; + + changes := '{}'; + + IF (coalesce(current_setting('logidze.full_snapshot', true), '') = 'on') THEN + BEGIN + changes = hstore_to_jsonb_loose(hstore(NEW.*)); + EXCEPTION + WHEN NUMERIC_VALUE_OUT_OF_RANGE THEN + changes = row_to_json(NEW.*)::jsonb; + FOR k IN (SELECT key FROM jsonb_each(changes)) + LOOP + IF jsonb_typeof(changes->k) = 'object' THEN + changes = jsonb_set(changes, ARRAY[k], to_jsonb(changes->>k)); + END IF; + END LOOP; + END; + ELSE + BEGIN + changes = hstore_to_jsonb_loose( + hstore(NEW.*) - hstore(OLD.*) + ); + EXCEPTION + WHEN NUMERIC_VALUE_OUT_OF_RANGE THEN + changes = (SELECT + COALESCE(json_object_agg(key, value), '{}')::jsonb + FROM + jsonb_each(row_to_json(NEW.*)::jsonb) + WHERE NOT jsonb_build_object(key, value) <@ row_to_json(OLD.*)::jsonb); + FOR k IN (SELECT key FROM jsonb_each(changes)) + LOOP + IF jsonb_typeof(changes->k) = 'object' 
THEN + changes = jsonb_set(changes, ARRAY[k], to_jsonb(changes->>k)); + END IF; + END LOOP; + END; + END IF; + + changes = changes - 'log_data'; + + IF columns IS NOT NULL THEN + changes = logidze_filter_keys(changes, columns, include_columns); + END IF; + + IF changes = '{}' THEN + RETURN NEW; + END IF; + + new_v := (NEW.log_data#>>'{h,-1,v}')::int + 1; + + size := jsonb_array_length(NEW.log_data->'h'); + version := logidze_version(new_v, changes, ts); + + IF ( + debounce_time IS NOT NULL AND + (version->>'ts')::bigint - (NEW.log_data#>'{h,-1,ts}')::text::bigint <= debounce_time + ) THEN + -- merge new version with the previous one + new_v := (NEW.log_data#>>'{h,-1,v}')::int; + version := logidze_version(new_v, (NEW.log_data#>'{h,-1,c}')::jsonb || changes, ts); + -- remove the previous version from log + NEW.log_data := jsonb_set( + NEW.log_data, + '{h}', + (NEW.log_data->'h') - (size - 1) + ); + END IF; + + NEW.log_data := jsonb_set( + NEW.log_data, + ARRAY['h', size::text], + version, + true + ); + + NEW.log_data := jsonb_set( + NEW.log_data, + '{v}', + to_jsonb(new_v) + ); + + IF history_limit IS NOT NULL AND history_limit <= size THEN + NEW.log_data := logidze_compact_history(NEW.log_data, size - history_limit + 1); + END IF; + END IF; + + return NEW; + EXCEPTION + WHEN OTHERS THEN + GET STACKED DIAGNOSTICS err_sqlstate = RETURNED_SQLSTATE, + err_message = MESSAGE_TEXT, + err_detail = PG_EXCEPTION_DETAIL, + err_hint = PG_EXCEPTION_HINT, + err_context = PG_EXCEPTION_CONTEXT, + err_schema_name = SCHEMA_NAME, + err_table_name = TABLE_NAME; + err_jsonb := jsonb_build_object( + 'returned_sqlstate', err_sqlstate, + 'message_text', err_message, + 'pg_exception_detail', err_detail, + 'pg_exception_hint', err_hint, + 'pg_exception_context', err_context, + 'schema_name', err_schema_name, + 'table_name', err_table_name + ); + err_captured = logidze_capture_exception(err_jsonb); + IF err_captured THEN + return NEW; + ELSE + RAISE; + END IF; + END; +$body$ +LANGUAGE plpgsql; diff --git a/db/functions/logidze_snapshot_v03.sql b/db/functions/logidze_snapshot_v03.sql new file mode 100644 index 0000000000..c995662faf --- /dev/null +++ b/db/functions/logidze_snapshot_v03.sql @@ -0,0 +1,33 @@ +CREATE OR REPLACE FUNCTION logidze_snapshot(item jsonb, ts_column text DEFAULT NULL, columns text[] DEFAULT NULL, include_columns boolean DEFAULT false) RETURNS jsonb AS $body$ + -- version: 3 + DECLARE + ts timestamp with time zone; + k text; + BEGIN + item = item - 'log_data'; + IF ts_column IS NULL THEN + ts := statement_timestamp(); + ELSE + ts := coalesce((item->>ts_column)::timestamp with time zone, statement_timestamp()); + END IF; + + IF columns IS NOT NULL THEN + item := logidze_filter_keys(item, columns, include_columns); + END IF; + + FOR k IN (SELECT key FROM jsonb_each(item)) + LOOP + IF jsonb_typeof(item->k) = 'object' THEN + item := jsonb_set(item, ARRAY[k], to_jsonb(item->>k)); + END IF; + END LOOP; + + return json_build_object( + 'v', 1, + 'h', jsonb_build_array( + logidze_version(1, item, ts) + ) + ); + END; +$body$ +LANGUAGE plpgsql; diff --git a/db/functions/logidze_version_v02.sql b/db/functions/logidze_version_v02.sql new file mode 100644 index 0000000000..1bf7ef5f7f --- /dev/null +++ b/db/functions/logidze_version_v02.sql @@ -0,0 +1,21 @@ +CREATE OR REPLACE FUNCTION logidze_version(v bigint, data jsonb, ts timestamp with time zone) RETURNS jsonb AS $body$ + -- version: 2 + DECLARE + buf jsonb; + BEGIN + data = data - 'log_data'; + buf := jsonb_build_object( + 'ts', + (extract(epoch from ts) 
* 1000)::bigint, + 'v', + v, + 'c', + data + ); + IF coalesce(current_setting('logidze.meta', true), '') <> '' THEN + buf := jsonb_insert(buf, '{m}', current_setting('logidze.meta')::jsonb); + END IF; + RETURN buf; + END; +$body$ +LANGUAGE plpgsql; diff --git a/db/migrate/20210928095129_logidze_install.rb b/db/migrate/20210928095129_logidze_install.rb new file mode 100644 index 0000000000..55eba02ff5 --- /dev/null +++ b/db/migrate/20210928095129_logidze_install.rb @@ -0,0 +1,64 @@ +class LogidzeInstall < ActiveRecord::Migration[5.2] + def change + reversible do |dir| + dir.up do + create_function :logidze_snapshot, version: 3 + end + + dir.down do + execute "DROP FUNCTION IF EXISTS logidze_snapshot(jsonb, text, text[], boolean) CASCADE" + end + end + + reversible do |dir| + dir.up do + create_function :logidze_logger, version: 2 + end + + dir.down do + execute "DROP FUNCTION IF EXISTS logidze_logger() CASCADE" + end + end + + reversible do |dir| + dir.up do + create_function :logidze_version, version: 2 + end + + dir.down do + execute "DROP FUNCTION IF EXISTS logidze_version(bigint, jsonb, timestamp with time zone) CASCADE" + end + end + + reversible do |dir| + dir.up do + create_function :logidze_compact_history, version: 1 + end + + dir.down do + execute "DROP FUNCTION IF EXISTS logidze_compact_history(jsonb, integer) CASCADE" + end + end + + reversible do |dir| + dir.up do + create_function :logidze_capture_exception, version: 1 + end + + dir.down do + execute "DROP FUNCTION IF EXISTS logidze_capture_exception(jsonb) CASCADE" + end + end + + reversible do |dir| + dir.up do + create_function :logidze_filter_keys, version: 1 + end + + dir.down do + execute "DROP FUNCTION IF EXISTS logidze_filter_keys(jsonb, text[], boolean) CASCADE" + end + end + + end +end diff --git a/db/migrate/20210928095414_add_logidze_to_samples.rb b/db/migrate/20210928095414_add_logidze_to_samples.rb new file mode 100644 index 0000000000..d794f0efbe --- /dev/null +++ b/db/migrate/20210928095414_add_logidze_to_samples.rb @@ -0,0 +1,24 @@ +class AddLogidzeToSamples < ActiveRecord::Migration[5.2] + def change + add_column :samples, :log_data, :jsonb + + reversible do |dir| + dir.up do + create_trigger :logidze_on_samples, on: :samples + end + + dir.down do + execute "DROP TRIGGER IF EXISTS logidze_on_samples on samples;" + end + end + + reversible do |dir| + dir.up do + execute <<~SQL + UPDATE samples as t + SET log_data = logidze_snapshot(to_jsonb(t), 'updated_at'); + SQL + end + end + end +end diff --git a/db/migrate/20210928095425_add_logidze_to_reactions.rb b/db/migrate/20210928095425_add_logidze_to_reactions.rb new file mode 100644 index 0000000000..36209bebb7 --- /dev/null +++ b/db/migrate/20210928095425_add_logidze_to_reactions.rb @@ -0,0 +1,24 @@ +class AddLogidzeToReactions < ActiveRecord::Migration[5.2] + def change + add_column :reactions, :log_data, :jsonb + + reversible do |dir| + dir.up do + create_trigger :logidze_on_reactions, on: :reactions + end + + dir.down do + execute "DROP TRIGGER IF EXISTS logidze_on_reactions on reactions;" + end + end + + reversible do |dir| + dir.up do + execute <<~SQL + UPDATE reactions as t + SET log_data = logidze_snapshot(to_jsonb(t), 'updated_at'); + SQL + end + end + end +end diff --git a/db/migrate/20211021120309_add_timestamps_to_reactions_samples.rb b/db/migrate/20211021120309_add_timestamps_to_reactions_samples.rb new file mode 100644 index 0000000000..d1c0ed4b78 --- /dev/null +++ b/db/migrate/20211021120309_add_timestamps_to_reactions_samples.rb @@ -0,0 +1,7 @@ 
+class AddTimestampsToReactionsSamples < ActiveRecord::Migration[5.2] + def change + change_table :reactions_samples do |t| + t.timestamps null: false, default: '2021-10-1T00:00:00' + end + end +end diff --git a/db/migrate/20211021131722_add_logidze_to_reactions_samples.rb b/db/migrate/20211021131722_add_logidze_to_reactions_samples.rb new file mode 100644 index 0000000000..8a65ce63e8 --- /dev/null +++ b/db/migrate/20211021131722_add_logidze_to_reactions_samples.rb @@ -0,0 +1,24 @@ +class AddLogidzeToReactionsSamples < ActiveRecord::Migration[5.2] + def change + add_column :reactions_samples, :log_data, :jsonb + + reversible do |dir| + dir.up do + create_trigger :logidze_on_reactions_samples, on: :reactions_samples + end + + dir.down do + execute "DROP TRIGGER IF EXISTS logidze_on_reactions_samples on reactions_samples;" + end + end + + reversible do |dir| + dir.up do + execute <<~SQL + UPDATE reactions_samples as t + SET log_data = logidze_snapshot(to_jsonb(t), 'updated_at'); + SQL + end + end + end +end diff --git a/db/migrate/20211111154822_add_logidze_to_residues.rb b/db/migrate/20211111154822_add_logidze_to_residues.rb new file mode 100644 index 0000000000..beeb3839b9 --- /dev/null +++ b/db/migrate/20211111154822_add_logidze_to_residues.rb @@ -0,0 +1,24 @@ +class AddLogidzeToResidues < ActiveRecord::Migration[5.2] + def change + add_column :residues, :log_data, :jsonb + + reversible do |dir| + dir.up do + create_trigger :logidze_on_residues, on: :residues + end + + dir.down do + execute "DROP TRIGGER IF EXISTS logidze_on_residues on residues;" + end + end + + reversible do |dir| + dir.up do + execute <<~SQL + UPDATE residues as t + SET log_data = logidze_snapshot(to_jsonb(t), 'updated_at'); + SQL + end + end + end +end diff --git a/db/migrate/20211111154836_add_logidze_to_elemental_compositions.rb b/db/migrate/20211111154836_add_logidze_to_elemental_compositions.rb new file mode 100644 index 0000000000..80b52ca23c --- /dev/null +++ b/db/migrate/20211111154836_add_logidze_to_elemental_compositions.rb @@ -0,0 +1,24 @@ +class AddLogidzeToElementalCompositions < ActiveRecord::Migration[5.2] + def change + add_column :elemental_compositions, :log_data, :jsonb + + reversible do |dir| + dir.up do + create_trigger :logidze_on_elemental_compositions, on: :elemental_compositions + end + + dir.down do + execute "DROP TRIGGER IF EXISTS logidze_on_elemental_compositions on elemental_compositions;" + end + end + + reversible do |dir| + dir.up do + execute <<~SQL + UPDATE elemental_compositions as t + SET log_data = logidze_snapshot(to_jsonb(t), 'updated_at'); + SQL + end + end + end +end diff --git a/db/migrate/20211112121704_add_logidze_to_attachments.rb b/db/migrate/20211112121704_add_logidze_to_attachments.rb new file mode 100644 index 0000000000..e1db271c35 --- /dev/null +++ b/db/migrate/20211112121704_add_logidze_to_attachments.rb @@ -0,0 +1,24 @@ +class AddLogidzeToAttachments < ActiveRecord::Migration[5.2] + def change + add_column :attachments, :log_data, :jsonb + + reversible do |dir| + dir.up do + create_trigger :logidze_on_attachments, on: :attachments + end + + dir.down do + execute "DROP TRIGGER IF EXISTS logidze_on_attachments on attachments;" + end + end + + reversible do |dir| + dir.up do + execute <<~SQL + UPDATE attachments as t + SET log_data = logidze_snapshot(to_jsonb(t), 'updated_at'); + SQL + end + end + end +end diff --git a/db/migrate/20211112121733_add_logidze_to_containers.rb b/db/migrate/20211112121733_add_logidze_to_containers.rb new file mode 100644 index 
0000000000..3ec5f2a9ad --- /dev/null +++ b/db/migrate/20211112121733_add_logidze_to_containers.rb @@ -0,0 +1,24 @@ +class AddLogidzeToContainers < ActiveRecord::Migration[5.2] + def change + add_column :containers, :log_data, :jsonb + + reversible do |dir| + dir.up do + create_trigger :logidze_on_containers, on: :containers + end + + dir.down do + execute "DROP TRIGGER IF EXISTS logidze_on_containers on containers;" + end + end + + reversible do |dir| + dir.up do + execute <<~SQL + UPDATE containers as t + SET log_data = logidze_snapshot(to_jsonb(t), 'updated_at'); + SQL + end + end + end +end diff --git a/db/schema.rb b/db/schema.rb index f5949dbc8b..8f982124da 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -69,6 +69,7 @@ t.string "attachable_type" t.string "aasm_state" t.bigint "filesize" + t.jsonb "log_data" t.index ["attachable_type", "attachable_id"], name: "index_attachments_on_attachable_type_and_attachable_id" t.index ["identifier"], name: "index_attachments_on_identifier", unique: true end @@ -228,6 +229,7 @@ t.datetime "created_at", null: false t.datetime "updated_at", null: false t.integer "parent_id" + t.jsonb "log_data" t.index ["containable_type", "containable_id"], name: "index_containers_on_containable" end @@ -376,6 +378,7 @@ t.float "loading" t.datetime "created_at" t.datetime "updated_at" + t.jsonb "log_data" t.index ["sample_id"], name: "index_elemental_compositions_on_sample_id" end @@ -762,6 +765,7 @@ t.string "duration" t.string "rxno" t.string "conditions" + t.jsonb "log_data" t.index ["deleted_at"], name: "index_reactions_on_deleted_at" t.index ["rinchi_web_key"], name: "index_reactions_on_rinchi_web_key" t.index ["role"], name: "index_reactions_on_role" @@ -778,6 +782,9 @@ t.boolean "waste", default: false t.float "coefficient", default: 1.0 t.boolean "show_label", default: false, null: false + t.datetime "created_at", default: "2021-10-01 00:00:00", null: false + t.datetime "updated_at", default: "2021-10-01 00:00:00", null: false + t.jsonb "log_data" t.index ["reaction_id"], name: "index_reactions_samples_on_reaction_id" t.index ["sample_id"], name: "index_reactions_samples_on_sample_id" end @@ -908,6 +915,7 @@ t.hstore "custom_info" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.jsonb "log_data" t.index ["sample_id"], name: "index_residues_on_sample_id" end @@ -951,6 +959,7 @@ t.float "molecular_mass" t.string "sum_formula" t.jsonb "solvent" + t.jsonb "log_data" t.index ["deleted_at"], name: "index_samples_on_deleted_at" t.index ["identifier"], name: "index_samples_on_identifier" t.index ["molecule_id"], name: "index_samples_on_sample_id" @@ -1424,8 +1433,397 @@ where l.literature_id = l2.id and l.element_type = $1 and l.element_id = $2 $function$ + create_function :logidze_snapshot, sql_definition: <<-SQL + CREATE OR REPLACE FUNCTION public.logidze_snapshot(item jsonb, ts_column text DEFAULT NULL::text, columns text[] DEFAULT NULL::text[], include_columns boolean DEFAULT false) + RETURNS jsonb + LANGUAGE plpgsql + AS $function$ + -- version: 3 + DECLARE + ts timestamp with time zone; + k text; + BEGIN + item = item - 'log_data'; + IF ts_column IS NULL THEN + ts := statement_timestamp(); + ELSE + ts := coalesce((item->>ts_column)::timestamp with time zone, statement_timestamp()); + END IF; + + IF columns IS NOT NULL THEN + item := logidze_filter_keys(item, columns, include_columns); + END IF; + + FOR k IN (SELECT key FROM jsonb_each(item)) + LOOP + IF jsonb_typeof(item->k) = 'object' THEN + item := jsonb_set(item, ARRAY[k], 
to_jsonb(item->>k)); + END IF; + END LOOP; + + return json_build_object( + 'v', 1, + 'h', jsonb_build_array( + logidze_version(1, item, ts) + ) + ); + END; + $function$ + SQL + create_function :logidze_logger, sql_definition: <<-SQL + CREATE OR REPLACE FUNCTION public.logidze_logger() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + -- version: 2 + DECLARE + changes jsonb; + version jsonb; + snapshot jsonb; + new_v integer; + size integer; + history_limit integer; + debounce_time integer; + current_version integer; + k text; + iterator integer; + item record; + columns text[]; + include_columns boolean; + ts timestamp with time zone; + ts_column text; + err_sqlstate text; + err_message text; + err_detail text; + err_hint text; + err_context text; + err_table_name text; + err_schema_name text; + err_jsonb jsonb; + err_captured boolean; + BEGIN + ts_column := NULLIF(TG_ARGV[1], 'null'); + columns := NULLIF(TG_ARGV[2], 'null'); + include_columns := NULLIF(TG_ARGV[3], 'null'); + + IF TG_OP = 'INSERT' THEN + IF columns IS NOT NULL THEN + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns, include_columns); + ELSE + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column); + END IF; + + IF snapshot#>>'{h, -1, c}' != '{}' THEN + NEW.log_data := snapshot; + END IF; + + ELSIF TG_OP = 'UPDATE' THEN + + IF OLD.log_data is NULL OR OLD.log_data = '{}'::jsonb THEN + IF columns IS NOT NULL THEN + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column, columns, include_columns); + ELSE + snapshot = logidze_snapshot(to_jsonb(NEW.*), ts_column); + END IF; + + IF snapshot#>>'{h, -1, c}' != '{}' THEN + NEW.log_data := snapshot; + END IF; + RETURN NEW; + END IF; + + history_limit := NULLIF(TG_ARGV[0], 'null'); + debounce_time := NULLIF(TG_ARGV[4], 'null'); + + current_version := (NEW.log_data->>'v')::int; + + IF ts_column IS NULL THEN + ts := statement_timestamp(); + ELSE + ts := (to_jsonb(NEW.*)->>ts_column)::timestamp with time zone; + IF ts IS NULL OR ts = (to_jsonb(OLD.*)->>ts_column)::timestamp with time zone THEN + ts := statement_timestamp(); + END IF; + END IF; + + IF NEW = OLD THEN + RETURN NEW; + END IF; + + IF current_version < (NEW.log_data#>>'{h,-1,v}')::int THEN + iterator := 0; + FOR item in SELECT * FROM jsonb_array_elements(NEW.log_data->'h') + LOOP + IF (item.value->>'v')::int > current_version THEN + NEW.log_data := jsonb_set( + NEW.log_data, + '{h}', + (NEW.log_data->'h') - iterator + ); + END IF; + iterator := iterator + 1; + END LOOP; + END IF; + + changes := '{}'; + + IF (coalesce(current_setting('logidze.full_snapshot', true), '') = 'on') THEN + BEGIN + changes = hstore_to_jsonb_loose(hstore(NEW.*)); + EXCEPTION + WHEN NUMERIC_VALUE_OUT_OF_RANGE THEN + changes = row_to_json(NEW.*)::jsonb; + FOR k IN (SELECT key FROM jsonb_each(changes)) + LOOP + IF jsonb_typeof(changes->k) = 'object' THEN + changes = jsonb_set(changes, ARRAY[k], to_jsonb(changes->>k)); + END IF; + END LOOP; + END; + ELSE + BEGIN + changes = hstore_to_jsonb_loose( + hstore(NEW.*) - hstore(OLD.*) + ); + EXCEPTION + WHEN NUMERIC_VALUE_OUT_OF_RANGE THEN + changes = (SELECT + COALESCE(json_object_agg(key, value), '{}')::jsonb + FROM + jsonb_each(row_to_json(NEW.*)::jsonb) + WHERE NOT jsonb_build_object(key, value) <@ row_to_json(OLD.*)::jsonb); + FOR k IN (SELECT key FROM jsonb_each(changes)) + LOOP + IF jsonb_typeof(changes->k) = 'object' THEN + changes = jsonb_set(changes, ARRAY[k], to_jsonb(changes->>k)); + END IF; + END LOOP; + END; + END IF; + + changes = changes - 'log_data'; + + IF columns IS NOT 
NULL THEN + changes = logidze_filter_keys(changes, columns, include_columns); + END IF; + + IF changes = '{}' THEN + RETURN NEW; + END IF; + + new_v := (NEW.log_data#>>'{h,-1,v}')::int + 1; + + size := jsonb_array_length(NEW.log_data->'h'); + version := logidze_version(new_v, changes, ts); + + IF ( + debounce_time IS NOT NULL AND + (version->>'ts')::bigint - (NEW.log_data#>'{h,-1,ts}')::text::bigint <= debounce_time + ) THEN + -- merge new version with the previous one + new_v := (NEW.log_data#>>'{h,-1,v}')::int; + version := logidze_version(new_v, (NEW.log_data#>'{h,-1,c}')::jsonb || changes, ts); + -- remove the previous version from log + NEW.log_data := jsonb_set( + NEW.log_data, + '{h}', + (NEW.log_data->'h') - (size - 1) + ); + END IF; + + NEW.log_data := jsonb_set( + NEW.log_data, + ARRAY['h', size::text], + version, + true + ); + + NEW.log_data := jsonb_set( + NEW.log_data, + '{v}', + to_jsonb(new_v) + ); + + IF history_limit IS NOT NULL AND history_limit <= size THEN + NEW.log_data := logidze_compact_history(NEW.log_data, size - history_limit + 1); + END IF; + END IF; + + return NEW; + EXCEPTION + WHEN OTHERS THEN + GET STACKED DIAGNOSTICS err_sqlstate = RETURNED_SQLSTATE, + err_message = MESSAGE_TEXT, + err_detail = PG_EXCEPTION_DETAIL, + err_hint = PG_EXCEPTION_HINT, + err_context = PG_EXCEPTION_CONTEXT, + err_schema_name = SCHEMA_NAME, + err_table_name = TABLE_NAME; + err_jsonb := jsonb_build_object( + 'returned_sqlstate', err_sqlstate, + 'message_text', err_message, + 'pg_exception_detail', err_detail, + 'pg_exception_hint', err_hint, + 'pg_exception_context', err_context, + 'schema_name', err_schema_name, + 'table_name', err_table_name + ); + err_captured = logidze_capture_exception(err_jsonb); + IF err_captured THEN + return NEW; + ELSE + RAISE; + END IF; + END; + $function$ + SQL + create_function :logidze_version, sql_definition: <<-SQL + CREATE OR REPLACE FUNCTION public.logidze_version(v bigint, data jsonb, ts timestamp with time zone) + RETURNS jsonb + LANGUAGE plpgsql + AS $function$ + -- version: 2 + DECLARE + buf jsonb; + BEGIN + data = data - 'log_data'; + buf := jsonb_build_object( + 'ts', + (extract(epoch from ts) * 1000)::bigint, + 'v', + v, + 'c', + data + ); + IF coalesce(current_setting('logidze.meta', true), '') <> '' THEN + buf := jsonb_insert(buf, '{m}', current_setting('logidze.meta')::jsonb); + END IF; + RETURN buf; + END; + $function$ + SQL + create_function :logidze_compact_history, sql_definition: <<-SQL + CREATE OR REPLACE FUNCTION public.logidze_compact_history(log_data jsonb, cutoff integer DEFAULT 1) + RETURNS jsonb + LANGUAGE plpgsql + AS $function$ + -- version: 1 + DECLARE + merged jsonb; + BEGIN + LOOP + merged := jsonb_build_object( + 'ts', + log_data#>'{h,1,ts}', + 'v', + log_data#>'{h,1,v}', + 'c', + (log_data#>'{h,0,c}') || (log_data#>'{h,1,c}') + ); + + IF (log_data#>'{h,1}' ? 'm') THEN + merged := jsonb_set(merged, ARRAY['m'], log_data#>'{h,1,m}'); + END IF; + + log_data := jsonb_set( + log_data, + '{h}', + jsonb_set( + log_data->'h', + '{1}', + merged + ) - 0 + ); + + cutoff := cutoff - 1; + + EXIT WHEN cutoff <= 0; + END LOOP; + + return log_data; + END; + $function$ + SQL + create_function :logidze_capture_exception, sql_definition: <<-SQL + CREATE OR REPLACE FUNCTION public.logidze_capture_exception(error_data jsonb) + RETURNS boolean + LANGUAGE plpgsql + AS $function$ + -- version: 1 + BEGIN + -- Feel free to change this function to change Logidze behavior on exception. 
+ -- + -- Return `false` to raise exception or `true` to commit record changes. + -- + -- `error_data` contains: + -- - returned_sqlstate + -- - message_text + -- - pg_exception_detail + -- - pg_exception_hint + -- - pg_exception_context + -- - schema_name + -- - table_name + -- Learn more about available keys: + -- https://www.postgresql.org/docs/9.6/plpgsql-control-structures.html#PLPGSQL-EXCEPTION-DIAGNOSTICS-VALUES + -- + + return false; + END; + $function$ + SQL + create_function :logidze_filter_keys, sql_definition: <<-SQL + CREATE OR REPLACE FUNCTION public.logidze_filter_keys(obj jsonb, keys text[], include_columns boolean DEFAULT false) + RETURNS jsonb + LANGUAGE plpgsql + AS $function$ + -- version: 1 + DECLARE + res jsonb; + key text; + BEGIN + res := '{}'; + + IF include_columns THEN + FOREACH key IN ARRAY keys + LOOP + IF obj ? key THEN + res = jsonb_insert(res, ARRAY[key], obj->key); + END IF; + END LOOP; + ELSE + res = obj; + FOREACH key IN ARRAY keys + LOOP + res = res - key; + END LOOP; + END IF; + + RETURN res; + END; + $function$ SQL + create_trigger :logidze_on_reactions, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_reactions BEFORE INSERT OR UPDATE ON public.reactions FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_samples, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_samples BEFORE INSERT OR UPDATE ON public.samples FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_residues, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_residues BEFORE INSERT OR UPDATE ON public.residues FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_elemental_compositions, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_elemental_compositions BEFORE INSERT OR UPDATE ON public.elemental_compositions FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_containers, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_containers BEFORE INSERT OR UPDATE ON public.containers FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_attachments, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_attachments BEFORE INSERT OR UPDATE ON public.attachments FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL + create_trigger :logidze_on_reactions_samples, sql_definition: <<-SQL + CREATE TRIGGER logidze_on_reactions_samples BEFORE INSERT OR UPDATE ON public.reactions_samples FOR EACH ROW WHEN ((COALESCE(current_setting('logidze.disabled'::text, true), ''::text) <> 'on'::text)) EXECUTE FUNCTION logidze_logger('null', 'updated_at') + SQL create_trigger :update_users_matrix_trg, sql_definition: <<-SQL CREATE TRIGGER update_users_matrix_trg AFTER INSERT OR UPDATE ON public.matrices FOR EACH ROW EXECUTE FUNCTION update_users_matrix() SQL diff --git a/db/triggers/logidze_on_attachments_v01.sql 
b/db/triggers/logidze_on_attachments_v01.sql new file mode 100644 index 0000000000..bd649e288d --- /dev/null +++ b/db/triggers/logidze_on_attachments_v01.sql @@ -0,0 +1,6 @@ +CREATE TRIGGER logidze_on_attachments +BEFORE UPDATE OR INSERT ON attachments FOR EACH ROW +WHEN (coalesce(current_setting('logidze.disabled', true), '') <> 'on') +-- Parameters: history_size_limit (integer), timestamp_column (text), filtered_columns (text[]), +-- include_columns (boolean), debounce_time_ms (integer) +EXECUTE PROCEDURE logidze_logger(null, 'updated_at'); diff --git a/db/triggers/logidze_on_containers_v01.sql b/db/triggers/logidze_on_containers_v01.sql new file mode 100644 index 0000000000..ac5f7bd440 --- /dev/null +++ b/db/triggers/logidze_on_containers_v01.sql @@ -0,0 +1,6 @@ +CREATE TRIGGER logidze_on_containers +BEFORE UPDATE OR INSERT ON containers FOR EACH ROW +WHEN (coalesce(current_setting('logidze.disabled', true), '') <> 'on') +-- Parameters: history_size_limit (integer), timestamp_column (text), filtered_columns (text[]), +-- include_columns (boolean), debounce_time_ms (integer) +EXECUTE PROCEDURE logidze_logger(null, 'updated_at'); diff --git a/db/triggers/logidze_on_elemental_compositions_v01.sql b/db/triggers/logidze_on_elemental_compositions_v01.sql new file mode 100644 index 0000000000..88d08c574c --- /dev/null +++ b/db/triggers/logidze_on_elemental_compositions_v01.sql @@ -0,0 +1,6 @@ +CREATE TRIGGER logidze_on_elemental_compositions +BEFORE UPDATE OR INSERT ON elemental_compositions FOR EACH ROW +WHEN (coalesce(current_setting('logidze.disabled', true), '') <> 'on') +-- Parameters: history_size_limit (integer), timestamp_column (text), filtered_columns (text[]), +-- include_columns (boolean), debounce_time_ms (integer) +EXECUTE PROCEDURE logidze_logger(null, 'updated_at'); diff --git a/db/triggers/logidze_on_reactions_samples_v01.sql b/db/triggers/logidze_on_reactions_samples_v01.sql new file mode 100644 index 0000000000..a9a6da3c75 --- /dev/null +++ b/db/triggers/logidze_on_reactions_samples_v01.sql @@ -0,0 +1,6 @@ +CREATE TRIGGER logidze_on_reactions_samples +BEFORE UPDATE OR INSERT ON reactions_samples FOR EACH ROW +WHEN (coalesce(current_setting('logidze.disabled', true), '') <> 'on') +-- Parameters: history_size_limit (integer), timestamp_column (text), filtered_columns (text[]), +-- include_columns (boolean), debounce_time_ms (integer) +EXECUTE PROCEDURE logidze_logger(null, 'updated_at'); diff --git a/db/triggers/logidze_on_reactions_v01.sql b/db/triggers/logidze_on_reactions_v01.sql new file mode 100644 index 0000000000..ad3f473ea7 --- /dev/null +++ b/db/triggers/logidze_on_reactions_v01.sql @@ -0,0 +1,6 @@ +CREATE TRIGGER logidze_on_reactions +BEFORE UPDATE OR INSERT ON reactions FOR EACH ROW +WHEN (coalesce(current_setting('logidze.disabled', true), '') <> 'on') +-- Parameters: history_size_limit (integer), timestamp_column (text), filtered_columns (text[]), +-- include_columns (boolean), debounce_time_ms (integer) +EXECUTE PROCEDURE logidze_logger(null, 'updated_at'); diff --git a/db/triggers/logidze_on_residues_v01.sql b/db/triggers/logidze_on_residues_v01.sql new file mode 100644 index 0000000000..361a12725e --- /dev/null +++ b/db/triggers/logidze_on_residues_v01.sql @@ -0,0 +1,6 @@ +CREATE TRIGGER logidze_on_residues +BEFORE UPDATE OR INSERT ON residues FOR EACH ROW +WHEN (coalesce(current_setting('logidze.disabled', true), '') <> 'on') +-- Parameters: history_size_limit (integer), timestamp_column (text), filtered_columns (text[]), +-- include_columns (boolean), 
debounce_time_ms (integer) +EXECUTE PROCEDURE logidze_logger(null, 'updated_at'); diff --git a/db/triggers/logidze_on_samples_v01.sql b/db/triggers/logidze_on_samples_v01.sql new file mode 100644 index 0000000000..4bfd8cd911 --- /dev/null +++ b/db/triggers/logidze_on_samples_v01.sql @@ -0,0 +1,6 @@ +CREATE TRIGGER logidze_on_samples +BEFORE UPDATE OR INSERT ON samples FOR EACH ROW +WHEN (coalesce(current_setting('logidze.disabled', true), '') <> 'on') +-- Parameters: history_size_limit (integer), timestamp_column (text), filtered_columns (text[]), +-- include_columns (boolean), debounce_time_ms (integer) +EXECUTE PROCEDURE logidze_logger(null, 'updated_at'); diff --git a/package.json b/package.json index 2f3fabb5a0..63664d94b1 100644 --- a/package.json +++ b/package.json @@ -57,6 +57,7 @@ "react-async-script-loader": "0.3.0", "react-barcode": "^1.1.0", "react-bootstrap": "~0.33.1", + "react-bootstrap-table-next": "^4.0.3", "react-color": "^2.17.3", "react-contextmenu": "^2.14.0", "react-cookie": "^0.4.8", diff --git a/yarn.lock b/yarn.lock index a0e4926180..3e39d85278 100644 --- a/yarn.lock +++ b/yarn.lock @@ -9874,6 +9874,15 @@ react-barcode@^1.1.0: jsbarcode "^3.8.0" prop-types "^15.6.2" +react-bootstrap-table-next@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/react-bootstrap-table-next/-/react-bootstrap-table-next-4.0.3.tgz#b55873b01adfe22a7181904b784a9d24ac2822cf" + integrity sha512-uKxC73qUdUfusRf2uzDfMiF9LvTG5vuhTZa0lbAgHWSLLLaKTsI0iHf1e4+c7gP71q8dFsp7StvkP65SxC1JRg== + dependencies: + classnames "^2.2.5" + react-transition-group "^4.2.0" + underscore "1.9.1" + react-bootstrap@~0.33.1: version "0.33.1" resolved "https://registry.npmjs.org/react-bootstrap/-/react-bootstrap-0.33.1.tgz" @@ -10384,7 +10393,7 @@ react-transition-group@^2.0.0, react-transition-group@^2.2.1: prop-types "^15.6.2" react-lifecycles-compat "^3.0.4" -react-transition-group@^4.3.0, react-transition-group@^4.4.0: +react-transition-group@^4.3.0: version "4.4.2" resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-4.4.2.tgz#8b59a56f09ced7b55cbd53c36768b922890d5470" integrity sha512-/RNYfRAMlZwDSr6z4zNKV6xu53/e2BuaBbGhbyYIXTrmgu/bGHzmqOs7mJSJBHy9Ud+ApHx3QjrkKSp1pxvlFg== @@ -10394,6 +10403,16 @@ react-transition-group@^4.3.0, react-transition-group@^4.4.0: loose-envify "^1.4.0" prop-types "^15.6.2" +react-transition-group@^4.3.0: + version "4.4.1" + resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-4.4.1.tgz#63868f9325a38ea5ee9535d828327f85773345c9" + integrity sha512-Djqr7OQ2aPUiYurhPalTrVy9ddmFCCzwhqQmtN+J3+3DzLO209Fdr70QrN8Z3DsglWql6iY1lDWAfpFiBtuKGw== + dependencies: + "@babel/runtime" "^7.5.5" + dom-helpers "^5.0.1" + loose-envify "^1.4.0" + prop-types "^15.6.2" + react-treeview@0.4.7: version "0.4.7" resolved "https://registry.npmjs.org/react-treeview/-/react-treeview-0.4.7.tgz" @@ -12000,6 +12019,11 @@ unicode-canonical-property-names-ecmascript@^2.0.0: resolved "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz" integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== +underscore@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.1.tgz#06dce34a0e68a7babc29b365b8e74b8925203961" + integrity sha512-5/4etnCkd9c8gwgowi5/om/mYO5ajCaOgdzj/oW+0eQV9WxKBDZw5+ycmKmeaTXjInS/W0BzpGLo2xR2aBwZdg== + unicode-match-property-ecmascript@^2.0.0: version "2.0.0" resolved 
"https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz"