hf_data.py
import os

import requests
import fire
import pyarrow.parquet as pq
from datasets import concatenate_datasets, Dataset
from dotenv import dotenv_values

from resrer.eval import evaluate_remote_dataset

# Read HF_TOKEN from a local .env file
config = dotenv_values(".env")

def evaluate(token=config['HF_TOKEN'], dataset='seonglae/nq_open-validation', contain=None,
             exclude=None, upload=False, debug=False):
    """Evaluate every matching config of a remote HF dataset."""
    headers = {"Authorization": f"Bearer {token}"}
    # Query the datasets-server splits endpoint; the response has the shape
    # {"splits": [{"dataset": ..., "config": ..., "split": ...}, ...]}
    url = f"https://datasets-server.huggingface.co/splits?dataset={dataset}"
    response = requests.get(url, headers=headers, timeout=10)
    data = response.json()
    for split in data['splits']:
        # Optional substring filters on the config name
        if contain and str(contain) not in split['config']:
            continue
        if exclude and str(exclude) in split['config']:
            continue
        result = evaluate_remote_dataset(dataset, split['config'], token=token,
                                         upload=upload, debug=debug)
        print(f"{split['config']}: {result}")
    return 'Done'

def upload(repo='seonglae/resrer-nq', folder='data/train/', token=config['HF_TOKEN']):
    """Concatenate local parquet shards and push the filtered dataset to the Hub."""
    parquets = os.listdir(folder)
    # Memory-map each parquet shard into an Arrow table, then wrap it as a Dataset
    arrows = list(map(lambda path: pq.read_table(
        folder + path, memory_map=True), parquets))
    datasets = list(map(Dataset, arrows))
    dataset = concatenate_datasets(datasets)
    # Keep only rows that actually have a summarization output
    ds = dataset.filter(lambda row: row['summarization_text'] != '')
    print(ds)
    ds.push_to_hub(repo_id=repo, token=token)
    return 'Done'

if __name__ == '__main__':
    # Expose evaluate() and upload() as CLI subcommands via python-fire
    fire.Fire()
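
# Usage sketch (assumes a .env file containing HF_TOKEN next to this script and
# the resrer package on the import path; the --contain value is illustrative):
#   python hf_data.py evaluate --dataset seonglae/nq_open-validation --contain dpr
#   python hf_data.py upload --repo seonglae/resrer-nq --folder data/train/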