// func/imports/create-import.js

"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.createImport = createImport;
var _get2 = _interopRequireDefault(require("lodash/get"));
var _snakeCase2 = _interopRequireDefault(require("lodash/snakeCase"));
var _omit2 = _interopRequireDefault(require("lodash/omit"));
var _entity = require("../../types/entity");
var _util = require("../../util");
var _entity2 = require("../entity");
var _misc = require("./misc");
var _alias = require("../alias");
var _disambiguation = require("../disambiguation");
var _identifier = require("../identifier");
var _language = require("../language");
var _releaseEvent = require("../releaseEvent");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/*
 * Copyright (C) 2018  Shivam Tripathi
 *               2023  David Kellner
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

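/**
 * Inserts a new row into the `bookbrainz.import` table (converting the given
 * properties from camelCase to snake_case) and returns the generated import ID.
 */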
function createImportRecord(transacting, data) {
  return transacting.insert((0, _util.camelToSnake)(data)).into('bookbrainz.import').returning('id');
}
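/**
 * Upserts a row of the `bookbrainz.link_import` table which links an import
 * to its external source record, keyed on (`origin_source_id`, `origin_id`).
 */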
function createOrUpdateImportMetadata(transacting, record) {
  return transacting.insert((0, _util.camelToSnake)(record)).into('bookbrainz.link_import').onConflict(['origin_source_id', 'origin_id']).merge();
}
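/**
 * Fetches the import ID and the (possibly still empty) entity ID of an already
 * imported external entity from the `bookbrainz.link_import` table.
 */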
function getImportMetadata(transacting, externalSourceId, externalIdentifier) {
  return transacting.select('import_id', 'entity_id').from('bookbrainz.link_import').where((0, _util.camelToSnake)({
    originId: externalIdentifier,
    originSourceId: externalSourceId
  }));
}

/**
 * Inserts a row into the entity type specific `*_data` table, combining the
 * given data set IDs (alias, identifier, disambiguation and any extra sets
 * which not all entity types have) with the additional entity properties,
 * and returns the ID of the new record.
 */
function createImportDataRecord(transacting, dataSets, importData) {
  const {
    entityType
  } = importData;

  /* Omit all extra props which are not accepted as columns when creating an
  entity_data record, otherwise the insert would fail because there is no
  such column in the table.
  Entity data records store dates split into day, month and year fields
  rather than full dates, so the date props are omitted here. */
  const additionalEntityProps = (0, _omit2.default)((0, _entity2.getAdditionalEntityProps)(importData.data, entityType), ['beginDate', 'endDate']);
  const dataRecordProps = {
    ...dataSets,
    ...additionalEntityProps
  };
  return transacting.insert((0, _util.camelToSnake)(dataRecordProps)).into(`bookbrainz.${(0, _snakeCase2.default)(entityType)}_data`).returning('id');
}
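/**
 * Upserts the row of the entity type specific `*_import_header` table which
 * links an import to its currently stored data record, keyed on `import_id`.
 */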
function createOrUpdateImportHeader(transacting, record, entityType) {
  const table = `bookbrainz.${(0, _snakeCase2.default)(entityType)}_import_header`;
  return transacting.insert((0, _util.camelToSnake)(record)).into(table).onConflict('import_id').merge();
}
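/**
 * Creates the extra data sets (languages, release events) which not all entity
 * types have and returns an object containing the IDs of the created sets.
 */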
async function updateEntityExtraDataSets(orm, transacting, importData) {
  // Extract all entity data sets' related fields
  const {
    languages,
    releaseEvents
  } = importData;
  const dataSets = {};
  if (languages) {
    const languageSet = await (0, _language.updateLanguageSet)(orm, transacting, null, languages);
    dataSets.languageSetId = languageSet && languageSet.get('id');
  }
  if (releaseEvents) {
    const releaseEventSet = await (0, _releaseEvent.updateReleaseEventSet)(orm, transacting, null, releaseEvents);
    dataSets.releaseEventSetId = releaseEventSet && releaseEventSet.get('id');
  }
  // The publisher field is skipped, as it is not required for imports.

  return dataSets;
}
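/**
 * Creates a pending import for an external entity within a single transaction:
 * resolves the origin source, stores the alias, identifier, disambiguation and
 * entity specific data sets, and upserts the import metadata and header rows.
 * If the entity has already been imported, `existingImportAction` decides
 * whether it is skipped or its pending data is updated.
 * Resolves with the import ID and a status string ('created pending',
 * 'updated pending', 'skipped pending' or 'skipped accepted').
 *
 * @example
 * // Hypothetical usage sketch; the exact shape of `importData` depends on the
 * // importer which produced it and is not defined in this file.
 * // const {importId, status} = await createImport(orm, importData, {existingImportAction: 'skip'});
 */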
function createImport(orm, importData, {
  existingImportAction = 'skip'
} = {}) {
  if (!_entity.ENTITY_TYPES.includes(importData.entityType)) {
    throw new Error('Invalid entity type');
  }
  return orm.bookshelf.transaction(async transacting => {
    const {
      entityType
    } = importData;
    const {
      alias,
      identifiers,
      disambiguation,
      source
    } = importData.data;

    // Get origin_source
    let originSourceId = null;
    try {
      originSourceId = await (0, _misc.getOriginSourceId)(transacting, source);
    } catch (err) {
      // TODO: useless, we are only catching our self-thrown errors here
      throw new Error(`Error while getting the origin source ID: ${err}`);
    }
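    // Check whether this external entity has already been imported previously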
    const [existingImport] = await getImportMetadata(transacting, originSourceId, importData.originId);
    if (existingImport) {
      const isPendingImport = !existingImport.entity_id;
      if (existingImportAction === 'skip') {
        return {
          importId: existingImport.import_id,
          status: isPendingImport ? 'skipped pending' : 'skipped accepted'
        };
      } else if (isPendingImport) {
        // TODO: update/reuse already existing data of pending imports
      } else {
        // The previously imported entity has already been accepted
        if (existingImportAction === 'update pending') {
          // We only want to update pending, but not accepted entities
          return {
            importId: existingImport.import_id,
            status: 'skipped accepted'
          };
        }
        // We also want to create updates for already accepted entities ('update pending and accepted')
        // TODO: implement this feature in a later version and drop the following temporary return statement
        return {
          importId: existingImport.import_id,
          status: 'skipped accepted'
        };
      }
    }
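    // Create the alias, identifier, disambiguation and extra data sets in parallel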
    const [aliasSet, identifierSet, disambiguationObj, entityExtraDataSets] = await Promise.all([
      (0, _alias.updateAliasSet)(orm, transacting, null, null, alias),
      (0, _identifier.updateIdentifierSet)(orm, transacting, null, identifiers),
      (0, _disambiguation.updateDisambiguation)(orm, transacting, null, disambiguation),
      updateEntityExtraDataSets(orm, transacting, importData.data)
    ]);

    // Create entity type-specific data record
    let dataId = null;
    try {
      const [idObj] = await createImportDataRecord(transacting, {
        aliasSetId: aliasSet && aliasSet.get('id'),
        disambiguationId: disambiguationObj && disambiguationObj.get('id'),
        identifierSetId: identifierSet && identifierSet.get('id'),
        ...entityExtraDataSets
      }, importData);
      dataId = (0, _get2.default)(idObj, 'id');
    } catch (err) {
      throw new Error(`Failed to create the entity data record: ${err}`);
    }

    // Create import entity (if it is not already existing from a previous import attempt)
    let importId = existingImport?.import_id;
    if (!importId) {
      try {
        const [idObj] = await createImportRecord(transacting, {
          type: entityType
        });
        importId = (0, _get2.default)(idObj, 'id');
      } catch (err) {
        throw new Error(`Failed to create a new import ID: ${err}`);
      }
    }
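    // Assemble the metadata which links the import to its external source record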
    const importMetadata = {
      importId,
      importMetadata: importData.data.metadata,
      importedAt: transacting.raw("timezone('UTC'::TEXT, now())"),
      lastEdited: importData.lastEdited,
      originId: importData.originId,
      originSourceId
    };
    try {
      await createOrUpdateImportMetadata(transacting, importMetadata);
    } catch (err) {
      throw new Error(`Failed to upsert import metadata: ${err}`);
    }
    try {
      await createOrUpdateImportHeader(transacting, {
        dataId,
        importId
      }, entityType);
    } catch (err) {
      throw new Error(`Failed to upsert import header: ${err}`);
    }
    return {
      importId,
      status: existingImport ? 'updated pending' : 'created pending'
    };
  });
}