-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: index.js
99 lines (76 loc) · 2.91 KB
/
index.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
const sanityClient = require("@sanity/client");
const exportDataset = require("@sanity/export");
const sanityImport = require("@sanity/import");
const core = require("@actions/core");
const path = require("path");
const fs = require("fs");
const datasetToReset = core.getInput("dataset_to_reset");
const datasetToMirror = core.getInput("dataset_to_mirror");
const sanityToken = core.getInput("sanity_token");
const sanityProjectId = core.getInput("sanity_project_id");
const NON_SYSTEM_DOCUMENTS_QUERY = `*[ !(_type match "system*")]`;
const client = sanityClient({
projectId: sanityProjectId,
token: sanityToken,
useCdn: false,
dataset: datasetToReset,
});
async function main() {
const allDatasets = await client.datasets.list();
if (!allDatasets.some(dataset => dataset.name === datasetToReset)) {
throw new Error(`Dataset '${datasetToReset}' does not exist.`);
}
if (!allDatasets.some(dataset => dataset.name === datasetToMirror)) {
throw new Error(`Dataset '${datasetToMirror}' does not exist.`);
}
const numberOfNonSystemDocuments = await client.fetch(
`count(${NON_SYSTEM_DOCUMENTS_QUERY})`
);
if (numberOfNonSystemDocuments > 0) {
core.debug(
`Found ${numberOfNonSystemDocuments} existing documents within '${datasetToReset}'. Removing them...`
);
}
let remainingDocuemntsToDelete = numberOfNonSystemDocuments;
while (remainingDocuemntsToDelete > 0) {
// From the sanity documentation:
// For the time being you should not delete more than ~1000 documents in
// one transaction. This will change in the future.
// Ref: https://www.sanity.io/docs/http-mutations#deleting-multiple-documents-by-query-d8ebd1878516
const { results } = await client.mutate([
{ delete: { query: `*[ !(_type match "system*")][0..499]` } }
]);
remainingDocuemntsToDelete = await client.fetch(
`count(${NON_SYSTEM_DOCUMENTS_QUERY})`
);
core.debug(
`Removed ${results.length} documents, ${remainingDocuemntsToDelete} remaining.`
);
}
core.debug(`Exporting '${datasetToMirror}' dataset...`);
const outputPath = path.join(__dirname, "exported.tar.gz");
await exportDataset({
client,
outputPath,
dataset: datasetToMirror,
raw: false,
drafts: false,
assetConcurrency: 12
});
core.debug(`Importing '${datasetToMirror}' into '${datasetToReset}'...`);
const exportedDataset = fs.createReadStream(outputPath);
const { numDocs, warnings } = await sanityImport(exportedDataset, {
client,
operation: "createOrReplace"
});
fs.unlinkSync(outputPath);
warnings.forEach(warning => core.debug(`Warning: ${warning}`));
core.debug(`Imported ${numDocs} docuemnts.`);
core.setOutput(
"result",
`Removed ${numberOfNonSystemDocuments} from '${datasetToReset}', and imported ${numDocs} docuemnts from '${datasetToMirror}' into '${datasetToReset}'`
);
}
main()
.then(result => core.setOutput("result", result))
.catch(error => core.setFailed(error.message));