
wip(backend): More data migration code

joostdecock 2023-08-09 20:40:38 +02:00
parent ab570b7ebc
commit 30d48f1c07
8 changed files with 248 additions and 26 deletions

View file

@@ -6,11 +6,18 @@ import dotenv from 'dotenv'
dotenv.config()
const rmdb = async () => {
// Figure out what file would be removed so we can include it in the warning
let db = process.env.BACKEND_DB_URL
// Deal with prisma considering its own folder as the root
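// e.g. 'file:./db.sqlite' becomes './prisma/db.sqlite'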
if (db.slice(0, 7) === 'file:./') db = './prisma/' + db.slice(7)
else db = db.slice(6)
console.log({ db })
// Say hi
console.log(banner + '\n')
console.log(`
🚨 This will ${chalk.yellow('remove your database')}
🚨 This will ${chalk.yellow('remove your database')} : ${chalk.bold.red(db)}
There is ${chalk.bold('no way back')} from this - proceed with caution
`)
@@ -26,7 +33,6 @@ const rmdb = async () => {
if (answer.confirms) {
console.log()
// Nuke it from orbit
const db = process.env.BACKEND_DB_URL.slice(6)
fs.access(db, fs.constants.W_OK, (err) => {
if (err) console.log(` ⛔ Cannot remove ${chalk.green(db)} 🤔`)
else {

View file

@@ -149,6 +149,7 @@ if (baseConfig.use.cloudflareImages) {
account,
api: `https://api.cloudflare.com/client/v4/accounts/${account}/images/v1`,
token: process.env.BACKEND_CLOUDFLARE_IMAGES_TOKEN || 'fixmeSetCloudflareToken',
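// Whether to actually upload images to Cloudflare during an import run (can be disabled for test imports)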
import: envToBool(process.env.BACKEND_IMPORT_CLOUDFLARE_IMAGES),
}
}

View file

@@ -431,3 +431,164 @@ PatternModel.prototype.asPublicPattern = function () {
return data
}
const migratePattern = (v2, userId) => ({
createdAt: new Date(v2.created ? v2.created : v2.createdAt),
data: { version: v2.data.version, notes: ['Migrated from version 2'] },
design: v2.design || v2.data.design,
name: v2.name || '--',
notes: v2.notes ? v2.notes + '\n\nMigrated from v2' : 'Migrated from v2',
settings: v2.data.settings,
userId,
})
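/*
 * Maps the localized size labels that v2 used for its default 'people'
 * to the ids of the corresponding v3 curated measurements sets (csetId)
 */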
const v2lut = {
'size 28, with breasts': 1,
'size 30, with breasts': 2,
'size 32, with breasts': 3,
'size 34, with breasts': 4,
'size 36, with breasts': 5,
'size 38, with breasts': 6,
'size 40, with breasts': 7,
'size 42, with breasts': 8,
'size 44, with breasts': 9,
'size 46, with breasts': 10,
'size-28-b': 1,
'size-30-b': 2,
'size-32-b': 3,
'size-34-b': 4,
'size-36-b': 5,
'size-38-b': 6,
'size-40-b': 7,
'size-42-b': 8,
'size-44-b': 9,
'size-46-b': 10,
'size-28-with-breasts': 1,
'size-30-with-breasts': 2,
'size-32-with-breasts': 3,
'size-34-with-breasts': 4,
'size-36-with-breasts': 5,
'size-38-with-breasts': 6,
'size-40-with-breasts': 7,
'size-42-with-breasts': 8,
'size-44-with-breasts': 9,
'size-46-with-breasts': 10,
'größe 28, mit brüsten': 1,
'größe 30, mit brüsten': 2,
'größe 32, mit brüsten': 3,
'größe 34, mit brüsten': 4,
'größe 36, mit brüsten': 5,
'größe 38, mit brüsten': 6,
'größe 40, mit brüsten': 7,
'größe 42, mit brüsten': 8,
'größe 44, mit brüsten': 9,
'größe 46, mit brüsten': 10,
'taille 28, avec des seins': 1,
'taille 30, avec des seins': 2,
'taille 32, avec des seins': 3,
'taille 34, avec des seins': 4,
'taille 36, avec des seins': 5,
'taille 38, avec des seins': 6,
'taille 40, avec des seins': 7,
'taille 42, avec des seins': 8,
'taille 44, avec des seins': 9,
'taille 46, avec des seins': 10,
'tamaño 28, con pechos': 1,
'tamaño 30, con pechos': 2,
'tamaño 32, con pechos': 3,
'tamaño 34, con pechos': 4,
'tamaño 36, con pechos': 5,
'tamaño 38, con pechos': 6,
'tamaño 40, con pechos': 7,
'tamaño 42, con pechos': 8,
'tamaño 44, con pechos': 9,
'tamaño 46, con pechos': 10,
'size 32, without breasts': 11,
'size 34, without breasts': 12,
'size 36, without breasts': 13,
'size 38, without breasts': 14,
'size 40, without breasts': 15,
'size 42, without breasts': 16,
'size 44, without breasts': 17,
'size 46, without breasts': 18,
'size 48, without breasts': 19,
'size 50, without breasts': 20,
'taille 32, sans seins': 11,
'taille 34, sans seins': 12,
'taille 36, sans seins': 13,
'taille 38, sans seins': 14,
'taille 40, sans seins': 15,
'taille 42, sans seins': 16,
'taille 44, sans seins': 17,
'taille 46, sans seins': 18,
'taille 48, sans seins': 19,
'taille 50, sans seins': 20,
'size-32-a': 11,
'size-34-a': 12,
'size-36-a': 13,
'size-38-a': 14,
'size-40-a': 15,
'size-42-a': 16,
'size-44-a': 17,
'size-46-a': 18,
'size-48-a': 19,
'size-50-a': 20,
'maat 32, zonder borsten': 11,
'maat 34, zonder borsten': 12,
'maat 36, zonder borsten': 13,
'maat 38, zonder borsten': 14,
'maat 40, zonder borsten': 15,
'maat 42, zonder borsten': 16,
'maat 44, zonder borsten': 17,
'maat 46, zonder borsten': 18,
'maat 48, zonder borsten': 19,
'maat 50, zonder borsten': 20,
'größe 32, ohne brüste': 11,
'größe 34, ohne brüste': 12,
'größe 36, ohne brüste': 13,
'größe 38, ohne brüste': 14,
'größe 40, ohne brüste': 15,
'größe 42, ohne brüste': 16,
'größe 44, ohne brüste': 17,
'größe 46, ohne brüste': 18,
'größe 48, ohne brüste': 19,
'größe 50, ohne brüste': 20,
'tamaño 32, sin pechos': 11,
'tamaño 34, sin pechos': 12,
'tamaño 36, sin pechos': 13,
'tamaño 38, sin pechos': 14,
'tamaño 40, sin pechos': 15,
'tamaño 42, sin pechos': 16,
'tamaño 44, sin pechos': 17,
'tamaño 46, sin pechos': 18,
'tamaño 48, sin pechos': 19,
'tamaño 50, sin pechos': 20,
}
/*
* This is a special route not available for API users
*/
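/*
 * v2user holds the exported v2 account, lut maps v2 person handles to v3 set
 * ids (as returned by SetModel.prototype.import), userId is the v3 user id
 */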
PatternModel.prototype.import = async function (v2user, lut, userId) {
for (const [handle, pattern] of Object.entries(v2user.patterns)) {
let skip = false
const data = { ...migratePattern(pattern, userId), userId }
if (lut[pattern.person]) data.setId = lut[pattern.person]
else if (v2lut[pattern.person]) data.csetId = v2lut[pattern.person]
else if (pattern.person.length !== 5 && !['any', 'original'].includes(pattern.person)) {
console.log(`Cannot find ${pattern.person}`, pattern, { lut, v2lut })
process.exit()
}
if (!data.design || ['theo', 'ursula', 'unice'].includes(data.design)) skip = true
if (!skip) {
// V2 does not support images for patterns
data.img = 'default-avatar'
const cloaked = await this.cloak(data)
try {
this.record = await this.prisma.pattern.create({ data: cloaked })
} catch (err) {
log.warn(err, 'Could not create pattern')
console.log(data)
}
}
}
}

View file

@@ -1,5 +1,5 @@
import { log } from '../utils/log.mjs'
import { replaceImage, storeImage } from '../utils/cloudflare-images.mjs'
import { replaceImage, storeImage, ensureImage, importImage } from '../utils/cloudflare-images.mjs'
import yaml from 'js-yaml'
export function SetModel(tools) {
@@ -450,7 +450,6 @@ SetModel.prototype.sanitizeMeasurements = function (input) {
const migratePerson = (v2) => ({
createdAt: new Date(v2.created ? v2.created : v2.createdAt),
img: v2.picture,
imperial: v2.units === 'imperial',
name: v2.name || '--', // Encrypted, so always set _some_ value
notes: v2.notes || '--', // Encrypted, so always set _some_ value
@@ -462,11 +461,13 @@ const migratePerson = (v2) => ({
* This is a special route not available for API users
*/
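// Returns a lookup table mapping v2 person handles to the ids of the newly created sets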
SetModel.prototype.import = async function (v2user, userId) {
for (const person of v2user.people) {
const lut = {} // lookup table for v2 handle to v3 id
for (const [handle, person] of Object.entries(v2user.people)) {
const data = { ...migratePerson(person), userId }
await this.unguardedCreate(data)
// Now that we have an ID, we can handle the image
if (data.img) {
if (person.picture && person.picture.slice(-4) !== '.svg') {
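// (the default v2 avatar is an SVG, so SVG pictures are not imported)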
const imgId = `set-${this.record.id}`
const imgUrl =
'https://static.freesewing.org/users/' +
encodeURIComponent(v2user.handle.slice(0, 1)) +
@@ -475,14 +476,26 @@ SetModel.prototype.import = async function (v2user, userId) {
'/people/' +
encodeURIComponent(person.handle) +
'/' +
encodeURIComponent(data.img)
console.log('Grabbing', imgUrl)
//const [contentType, imgData] = await downloadImage(imgUrl)
//// Do not import the default SVG avatar
//if (contentType !== 'image/svg+xml') {
// const img = await setSetAvatar(this.record.id, [contentType, imgData], data.name)
// data.img = img
//}
encodeURIComponent(person.picture)
data.img = await importImage({
id: imgId,
metadata: {
user: userId,
v2PersonHandle: handle,
},
url: imgUrl,
})
data.img = imgId
} else data.img = 'default-avatar'
const cloaked = await this.cloak(data)
try {
this.record = await this.prisma.set.create({ data: cloaked })
lut[handle] = this.record.id
} catch (err) {
log.warn(err, 'Could not create set')
console.log(person)
}
}
return lut
}

View file

@@ -1,10 +1,11 @@
import jwt from 'jsonwebtoken'
import { log } from '../utils/log.mjs'
import { hash, hashPassword, randomString, verifyPassword } from '../utils/crypto.mjs'
import { replaceImage, ensureImage } from '../utils/cloudflare-images.mjs'
import { replaceImage, ensureImage, importImage } from '../utils/cloudflare-images.mjs'
import { clean, asJson, i18nUrl } from '../utils/index.mjs'
import { ConfirmationModel } from './confirmation.mjs'
import { SetModel } from './set.mjs'
import { PatternModel } from './pattern.mjs'
export function UserModel(tools) {
this.config = tools.config
@@ -19,6 +20,7 @@ export function UserModel(tools) {
this.clear = {} // For holding decrypted data
// Only used for import, can be removed after v3 is released
this.Set = new SetModel(tools)
this.Pattern = new PatternModel(tools)
return this
}
@@ -910,10 +912,10 @@ UserModel.prototype.import = async function (list) {
const data = migrateUser(sub)
await this.read({ ehash: data.ehash })
if (!this.record) {
/*
* Grab the image from the FreeSewing server and upload it to Sanity
*/
if (data.img) {
/*
* Figure out what image to grab from the FreeSewing v2 backend server
*/
const imgId = `user-${data.ihash}`
const imgUrl =
'https://static.freesewing.org/users/' +
@@ -922,7 +924,7 @@ UserModel.prototype.import = async function (list) {
encodeURIComponent(sub.handle) +
'/' +
encodeURIComponent(data.img)
data.img = await ensureImage({
data.img = await importImage({
id: imgId,
metadata: {
user: `v2-${sub.handle}`,
@@ -957,7 +959,9 @@ UserModel.prototype.import = async function (list) {
}
} else skipped.push(sub.email)
// That's the user, now load their people as sets
//if (sub.people) await this.Set.import(sub, this.record.id)
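// The set import returns a lookup table that the pattern import uses to link patterns to their sets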
let lut = false
if (sub.people) lut = await this.Set.import(sub, this.record.id)
if (sub.patterns) await this.Pattern.import(sub, lut, this.record.id)
} else skipped.push(sub.email)
}

View file

@@ -71,11 +71,25 @@ export async function ensureImage(props) {
result = await axios.post(config.api, form, { headers })
} catch (err) {
// It's fine
console.log(err)
}
return props.id
}
/*
* Method that imports an image from a URL and does not check the response
*/
export async function importImage(props) {
// Bypass the slow upload when testing the import
if (!config.import) return `default-avatar`
const form = getFormData(props)
await axios.post(config.api, form, { headers })
return props.id
}
/*
* Helper method to construct the form data for cloudflare
*/

View file

@@ -1,12 +1,16 @@
import dotenv from 'dotenv'
//import subscribers from './v2-newsletters.json' assert { type: 'json' }
import users from '../dump/v2-users.json' assert { type: 'json' }
import people from '../dump/v2-people.json' assert { type: 'json' }
import patterns from '../dump/v2-patterns.json' assert { type: 'json' }
dotenv.config()
const batchSize = 100
/*
* Only this token allows importing data
*/
const import_token = 'TOKEN_HERE'
const import_token = process.env.IMPORT_TOKEN
/*
* Where to connect to?
@@ -28,7 +32,7 @@ const importSubscribers = async () => {
console.log('Importing subscribers')
const count = subscribers.length
let total = 0
const batches = splitArray(subscribers, 50)
const batches = splitArray(subscribers, batchSize)
for (const batch of batches) {
const result = await fetch(`${BACKEND}/import/subscribers`, {
method: 'POST',
@@ -83,7 +87,7 @@ const importUsers = async () => {
console.log('Importing users')
const count = todo.length
let total = 0
const batches = splitArray(todo, 50)
const batches = splitArray(todo, batchSize)
for (const batch of batches) {
const result = await fetch(`${BACKEND}/import/users`, {
method: 'POST',
@@ -96,9 +100,8 @@ const importUsers = async () => {
}),
})
const data = await result.json()
total += data.imported
console.log(`${total}/${count} (${data.skipped} skipped)`)
console.log(data)
total += batchSize
console.log(`${total}/${count}`)
}
}

View file

@@ -0,0 +1,20 @@
import { cloudflareImages as config } from '../src/config.mjs'
import axios from 'axios'
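/*
 * Cleanup script: removes every Cloudflare image whose id starts with 'set-',
 * i.e. the measurements set avatars created by the import
 */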
const headers = { Authorization: `Bearer ${config.token}` }
const toRemove = []
const result = await axios.get(`${config.api}?page=1&per_page=10000`, { headers })
const images = result.data.result.images.map((i) => i.id).filter((id) => id.slice(0, 4) === 'set-')
const total = images.length
if (total > 0) {
console.log(`${total} images to remove`)
let i = 1
for (const id of images) {
console.log(`${i}/${total} : Removing ${id}`)
await axios.delete(`${config.api}/${id}`, { headers })
i++
}
} else console.log('No images to remove')