cakiki committed

Commit b4ae29f · Parent(s): 94049af

Update README.md

Files changed (1):
  1. README.md +4 -6
README.md CHANGED
```diff
@@ -31,15 +31,13 @@ from openTSNE import TSNE
 import datashader as ds
 import colorcet as cc
 
-import vectorizers
-from vectorizers.transformers import CountFeatureCompressionTransformer, InformationWeightTransformer
 
-from dask.distributed import Client, LocalCluster
+from dask.distributed import Client
 import dask.dataframe as dd
-import dask_ml.feature_extraction.text
+import dask_ml
 import dask.bag as db
 
-from transformers import AutoTokenizer, AutoModel
+from transformers import AutoTokenizer
 from datasets import load_dataset
 from datasets.utils.py_utils import convert_file_size_to_int
 
@@ -65,7 +63,7 @@ for shard_index in tqdm(range(num_shards)):
     shard = dset.shard(num_shards=num_shards, index=shard_index, contiguous=True)
     shard.to_parquet(f"{dset_name}/tokenized/tokenized-{shard_index:03d}.parquet")
 
-client = Client()
+client = Client() # To keep track of dask computation
 client
 
 df = dd.read_parquet(f'{dset_name}/tokenized/')
```
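
For orientation, here is a minimal, self-contained sketch of the workflow these hunks come from, as it reads after the commit: shard a 🤗 dataset to parquet, start a dask `Client`, and load the shards back as a dask dataframe. The dataset name, shard count, and the omission of the tokenization step are placeholders for illustration, not values from the README.

```python
import os

from dask.distributed import Client
import dask.dataframe as dd
from datasets import load_dataset
from tqdm.auto import tqdm

dset_name = "imdb"  # hypothetical dataset; the README defines its own dset_name
num_shards = 8      # hypothetical shard count

# The README tokenizes the dataset first (via AutoTokenizer); skipped here.
dset = load_dataset(dset_name, split="train")

os.makedirs(f"{dset_name}/tokenized", exist_ok=True)
for shard_index in tqdm(range(num_shards)):
    # contiguous=True makes each shard a contiguous slice of the dataset
    shard = dset.shard(num_shards=num_shards, index=shard_index, contiguous=True)
    shard.to_parquet(f"{dset_name}/tokenized/tokenized-{shard_index:03d}.parquet")

client = Client()  # local cluster; its dashboard keeps track of dask computation
df = dd.read_parquet(f"{dset_name}/tokenized/")
```

Note that `Client()` with no arguments spins up a local cluster, which is why the commit can drop the explicit `LocalCluster` import while keeping the same behavior.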