utils.ts
import { createObjectCsvWriter } from 'csv-writer'
import { ObjectStringifierHeader } from 'csv-writer/src/lib/record'
import { existsSync, mkdirSync, writeFileSync } from 'fs'
import 'isomorphic-fetch'

require('dotenv').config()

export const COVALENT_API_KEY = process.env.COVALENTHQ_API_KEY
export const INFURA_URL = process.env.INFURA_URL
// Converts a Unix timestamp in seconds to an ISO 8601 string.
// Falsy input (e.g. 0 or undefined) is returned unchanged.
export function toISOString(seconds: number) {
  return seconds && new Date(seconds * 1000).toISOString()
}

const delay = (ms: number) => new Promise(resolve => setTimeout(resolve, ms))

// Fetches a URL and parses the response as JSON, retrying up to `retry` times
// (3 by default) with a 2 second pause between attempts.
export async function fetchURL(url: string, options?: RequestInit, retry = 3): Promise<any> {
  try {
    const res = await fetch(url, options)
    return await res.json()
  } catch (err) {
    if (retry <= 0) {
      console.error('Fetch Error')
      throw err
    }
    await delay(2000)
    return await fetchURL(url, options, retry - 1)
  }
}
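
// Usage sketch (the URLs are assumed examples, not part of this module): fetch
// JSON with the default retry behaviour, or pass a custom retry count for
// flakier endpoints.
//
//   const status = await fetchURL('https://api.example.com/status')
//   const data = await fetchURL('https://api.example.com/data', undefined, 5)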
// Fetches every entity in `collection` from a GraphQL endpoint, paginating with
// first/skip (descending by `orderBy`) until an empty page is returned.
export async function fetchGraphQL(url: string, collection: string, where: string, orderBy: string, fields: string, first = 1000) {
  const elements = []
  while (true) {
    const skip = elements.length
    const query = `query { ${collection} (first: ${first}, skip: ${skip}, where: { ${where} }, orderBy: "${orderBy}", orderDirection: desc) { ${fields} }}`
    const json = await fetchURL(url, {
      headers: { 'content-type': 'application/json' },
      body: JSON.stringify({ query, variables: null }),
      method: 'POST'
    })
    if (json.errors) {
      console.log(elements[skip - 1]) // log the last fetched element to help debug the failing page
      throw Error('GraphQL Fetch Error ' + json.errors[0].message)
    }
    if (!json.data || !json.data[collection] || !json.data[collection].length) break
    elements.push(...json.data[collection])
  }
  return elements
}
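
// Usage sketch: fetch every entity of a collection from a subgraph-style endpoint.
// The URL, collection name, filter and fields below are hypothetical examples.
//
//   const proposals = await fetchGraphQL(
//     'https://api.thegraph.com/subgraphs/name/example/example',
//     'proposals',
//     'status: "ACTIVE"',
//     'createdAt',
//     'id createdAt status'
//   )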
// Deduplicates an array of objects, keeping the last item seen for each value of `dataKey`.
function removeDuplicates(data: any[], dataKey: string): any[] {
  const dataMap: Record<string, any> = {}
  for (const item of data) {
    dataMap[item[dataKey]] = item
  }
  return Object.values(dataMap)
}
// Fetches every entity in `collection`, paginating by a strictly increasing field
// (`fieldNameCondition`) instead of skip; results are deduplicated by `dataKey`.
export async function fetchGraphQLCondition(url: string, collection: string, fieldNameCondition: string, dataKey: string, fields: string, first = 1000) {
  const elements = []
  let lastField = 0
  while (true) {
    const query = `query { ${collection} (first: ${first}, where: { ${fieldNameCondition}_gt: ${lastField} }, orderBy: "${fieldNameCondition}", orderDirection: asc) { ${fields} }}`
    const json = await fetchURL(url, {
      headers: { 'content-type': 'application/json' },
      body: JSON.stringify({ query, variables: null }),
      method: 'POST'
    })
    if (json.errors) {
      throw Error('GraphQL Condition Fetch Error ' + json.errors[0].message)
    }
    if (!json.data || !json.data[collection] || !json.data[collection].length) break
    elements.push(...json.data[collection])
    lastField = elements[elements.length - 1][fieldNameCondition]
  }
  return removeDuplicates(elements, dataKey)
}
// Writes `data` to ./public/<name>, creating the directory if it does not exist.
export function saveToFile(name: string, data: string) {
  if (!existsSync('./public')) mkdirSync('./public')
  const path = './public/' + name
  writeFileSync(path, data, 'utf8')
}

export function saveToJSON(name: string, data: any) {
  saveToFile(name, JSON.stringify(data))
  console.log('The JSON file has been saved.')
}

// Writes `data` as CSV to ./public/<name> using the given csv-writer column header.
export async function saveToCSV(name: string, data: any, header: ObjectStringifierHeader) {
  if (!existsSync('./public')) mkdirSync('./public')
  const path = './public/' + name
  const csvWriter = createObjectCsvWriter({ path, header })
  await csvWriter.writeRecords(data)
  console.log('The CSV file has been saved.')
}
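
// Usage sketch: csv-writer expects the header as { id, title } column descriptors.
// The file name, columns and rows below are illustrative examples.
//
//   await saveToCSV('balances.csv', [{ address: '0x0', amount: 1 }], [
//     { id: 'address', title: 'Address' },
//     { id: 'amount', title: 'Amount' }
//   ])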
// Flattens a two-dimensional array into a single array.
export function flattenArray<Type>(arr: Type[][]): Type[] {
  return arr.reduce((acc, val) => acc.concat(val), [] as Type[])
}

// Splits an array into consecutive chunks of at most `chunkSize` elements.
export function splitArray<Type>(array: Type[], chunkSize: number) {
  return Array(Math.ceil(array.length / chunkSize)).fill(null).map(function(_, i) {
    return array.slice(i * chunkSize, i * chunkSize + chunkSize)
  })
}
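
// Usage sketch: splitArray chunks an array and flattenArray undoes the chunking.
//
//   splitArray([1, 2, 3, 4, 5], 2)       // [[1, 2], [3, 4], [5]]
//   flattenArray([[1, 2], [3, 4], [5]])  // [1, 2, 3, 4, 5]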