technicolor committed on
Commit
6010ccf
·
verified ·
1 Parent(s): d8716c2

Upload 11 files

Browse files
Files changed (12) hide show
  1. .gitattributes +10 -0
  2. arxiv_0.csv +3 -0
  3. arxiv_1.csv +3 -0
  4. arxiv_2.csv +3 -0
  5. arxiv_3.csv +3 -0
  6. arxiv_4.csv +3 -0
  7. arxiv_5.csv +3 -0
  8. arxiv_6.csv +3 -0
  9. arxiv_7.csv +3 -0
  10. arxiv_8.csv +3 -0
  11. arxiv_9.csv +3 -0
  12. load_dataset.ipynb +550 -0
.gitattributes CHANGED
@@ -56,3 +56,13 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
56
  # Video files - compressed
57
  *.mp4 filter=lfs diff=lfs merge=lfs -text
58
  *.webm filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
56
  # Video files - compressed
57
  *.mp4 filter=lfs diff=lfs merge=lfs -text
58
  *.webm filter=lfs diff=lfs merge=lfs -text
59
+ arxiv_0.csv filter=lfs diff=lfs merge=lfs -text
60
+ arxiv_1.csv filter=lfs diff=lfs merge=lfs -text
61
+ arxiv_2.csv filter=lfs diff=lfs merge=lfs -text
62
+ arxiv_3.csv filter=lfs diff=lfs merge=lfs -text
63
+ arxiv_4.csv filter=lfs diff=lfs merge=lfs -text
64
+ arxiv_5.csv filter=lfs diff=lfs merge=lfs -text
65
+ arxiv_6.csv filter=lfs diff=lfs merge=lfs -text
66
+ arxiv_7.csv filter=lfs diff=lfs merge=lfs -text
67
+ arxiv_8.csv filter=lfs diff=lfs merge=lfs -text
68
+ arxiv_9.csv filter=lfs diff=lfs merge=lfs -text
arxiv_0.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1e933e571ecfbce09c41c613a2f3d1944c5896b5d81891bbe81033d2f20fd6ea
3
+ size 324859290
arxiv_1.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:46e603435ae3c0e5e1fe907ea120380dc118dfa3f7a42c3e3d532a1794c8cc3e
3
+ size 324862846
arxiv_2.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3574ef03e9bf226e41b32d24a79393e6b62939b9bc3c7e8da2794ae2676cb633
3
+ size 324875192
arxiv_3.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9e5ab6fc59383de74516f40a8ee5e28d82461b12d0fd8e57c5c13467cec7b2c7
3
+ size 324915088
arxiv_4.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9359dbd40713e5ed4381f8ccd69d3b173dce9befb440323cb0be277d96a48a2e
3
+ size 324904748
arxiv_5.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1ace8ed834148a259b8f22dd9bad254250373dc7932ccfa0f1d54c2aa2ea44e3
3
+ size 324883272
arxiv_6.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dcf7c66c7aa76a1d90b17a55e096ec42e9604b617d350574e4007d6258d6e0eb
3
+ size 324913917
arxiv_7.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:061f21937a1b86be5de9c258385879cf87c78076f9f3af40dc446f17d9cfa985
3
+ size 324895299
arxiv_8.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b43efed9eb34f1731faf81a5b832f4117dab94e5496e56821bf33f63077262f9
3
+ size 324930094
arxiv_9.csv ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:450b8aede1ab0b86cb9b916abefef3b4dcaea140e3a7c73c7ebc2ee7dc4c3d0c
3
+ size 324910208
load_dataset.ipynb ADDED
@@ -0,0 +1,550 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [
8
+ {
9
+ "name": "stderr",
10
+ "output_type": "stream",
11
+ "text": [
12
+ "c:\\Anaconda3\\lib\\site-packages\\pandas\\core\\arrays\\masked.py:60: UserWarning: Pandas requires version '1.3.6' or newer of 'bottleneck' (version '1.3.5' currently installed).\n",
13
+ " from pandas.core import (\n"
14
+ ]
15
+ }
16
+ ],
17
+ "source": [
18
+ "import pandas as pd\n",
19
+ "import os\n",
20
+ "import opendatasets as od"
21
+ ]
22
+ },
23
+ {
24
+ "cell_type": "code",
25
+ "execution_count": 2,
26
+ "metadata": {},
27
+ "outputs": [
28
+ {
29
+ "name": "stdout",
30
+ "output_type": "stream",
31
+ "text": [
32
+ "Please provide your Kaggle credentials to download this dataset. Learn more: http://bit.ly/kaggle-creds\n",
33
+ "Your Kaggle username:Your Kaggle Key:Your Kaggle Key:Dataset URL: https://www.kaggle.com/datasets/awester/arxiv-embeddings\n",
34
+ "Downloading arxiv-embeddings.zip to .\\arxiv-embeddings\n"
35
+ ]
36
+ },
37
+ {
38
+ "name": "stderr",
39
+ "output_type": "stream",
40
+ "text": [
41
+ "100%|██████████| 4.09G/4.09G [03:28<00:00, 21.1MB/s] \n"
42
+ ]
43
+ },
44
+ {
45
+ "name": "stdout",
46
+ "output_type": "stream",
47
+ "text": [
48
+ "\n"
49
+ ]
50
+ }
51
+ ],
52
+ "source": [
53
+ "# Assign the Kaggle data set URL into variable\n",
54
+ "dataset = 'https://www.kaggle.com/datasets/awester/arxiv-embeddings/data'\n",
55
+ "# Using opendatasets let's download the data sets\n",
56
+ "od.download(dataset)"
57
+ ]
58
+ },
59
+ {
60
+ "cell_type": "code",
61
+ "execution_count": 15,
62
+ "metadata": {},
63
+ "outputs": [
64
+ {
65
+ "ename": "KeyboardInterrupt",
66
+ "evalue": "",
67
+ "output_type": "error",
68
+ "traceback": [
69
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
70
+ "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
71
+ "\u001b[1;32mC:\\temp\\Temp\\ipykernel_2344\\708505339.py\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mdata\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mpd\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread_json\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"C:\\\\Users\\\\Gordon\\\\OneDrive - The Hong Kong Polytechnic University\\\\YEAR2 SEM2\\\\NLP\\\\URIS\\\\Dataset\\\\arxiv-embeddings\\\\ml-arxiv-embeddings.json\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
72
+ "\u001b[1;32mc:\\Anaconda3\\lib\\site-packages\\pandas\\io\\json\\_json.py\u001b[0m in \u001b[0;36mread_json\u001b[1;34m(path_or_buf, orient, typ, dtype, convert_axes, convert_dates, keep_default_dates, precise_float, date_unit, encoding, encoding_errors, lines, chunksize, compression, nrows, storage_options, dtype_backend, engine)\u001b[0m\n\u001b[0;32m 789\u001b[0m \u001b[0mconvert_axes\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mTrue\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 790\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 791\u001b[1;33m json_reader = JsonReader(\n\u001b[0m\u001b[0;32m 792\u001b[0m \u001b[0mpath_or_buf\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 793\u001b[0m \u001b[0morient\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0morient\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
73
+ "\u001b[1;32mc:\\Anaconda3\\lib\\site-packages\\pandas\\io\\json\\_json.py\u001b[0m in \u001b[0;36m__init__\u001b[1;34m(self, filepath_or_buffer, orient, typ, dtype, convert_axes, convert_dates, keep_default_dates, precise_float, date_unit, encoding, lines, chunksize, compression, nrows, storage_options, encoding_errors, dtype_backend, engine)\u001b[0m\n\u001b[0;32m 903\u001b[0m \u001b[1;32melif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mengine\u001b[0m \u001b[1;33m==\u001b[0m \u001b[1;34m\"ujson\"\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 904\u001b[0m \u001b[0mdata\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_get_data_from_filepath\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfilepath_or_buffer\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 905\u001b[1;33m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdata\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_preprocess_data\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 906\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 907\u001b[0m \u001b[1;32mdef\u001b[0m \u001b[0m_preprocess_data\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mdata\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
74
+ "\u001b[1;32mc:\\Anaconda3\\lib\\site-packages\\pandas\\io\\json\\_json.py\u001b[0m in \u001b[0;36m_preprocess_data\u001b[1;34m(self, data)\u001b[0m\n\u001b[0;32m 915\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m\"read\"\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mand\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mchunksize\u001b[0m \u001b[1;32mor\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mnrows\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 916\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 917\u001b[1;33m \u001b[0mdata\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mdata\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 918\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m\"read\"\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mand\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mchunksize\u001b[0m \u001b[1;32mor\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mnrows\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 919\u001b[0m \u001b[0mdata\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mStringIO\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
75
+ "\u001b[1;32mc:\\Anaconda3\\lib\\codecs.py\u001b[0m in \u001b[0;36mdecode\u001b[1;34m(self, input, final)\u001b[0m\n\u001b[0;32m 317\u001b[0m \u001b[1;32mraise\u001b[0m \u001b[0mNotImplementedError\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 318\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 319\u001b[1;33m \u001b[1;32mdef\u001b[0m \u001b[0mdecode\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0minput\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfinal\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;32mFalse\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 320\u001b[0m \u001b[1;31m# decode input (taking the buffer into account)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 321\u001b[0m \u001b[0mdata\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mbuffer\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0minput\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
76
+ "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
77
+ ]
78
+ }
79
+ ],
80
+ "source": [
81
+ "data = pd.read_json(\"C:\\\\Users\\\\Gordon\\\\OneDrive - The Hong Kong Polytechnic University\\\\YEAR2 SEM2\\\\NLP\\\\URIS\\\\Dataset\\\\arxiv-embeddings\\\\ml-arxiv-embeddings.json\")"
82
+ ]
83
+ },
84
+ {
85
+ "cell_type": "code",
86
+ "execution_count": 27,
87
+ "metadata": {},
88
+ "outputs": [],
89
+ "source": [
90
+ "chunksize = 10000\n",
91
+ "chunks = []\n",
92
+ "i=0\n",
93
+ "for chunk in pd.read_json(\"C:\\\\Users\\\\Gordon\\\\OneDrive - The Hong Kong Polytechnic University\\\\YEAR2 SEM2\\\\NLP\\\\URIS\\\\Dataset\\\\arxiv-embeddings\\\\ml-arxiv-embeddings.json\", lines=True, chunksize=chunksize):\n",
94
+ " chunks.append(chunk)\n",
95
+ " i+=1\n",
96
+ " if i==10:\n",
97
+ " break\n",
98
+ "\n",
99
+ "# Now, 'chunks' is a list of DataFrame objects. You can concatenate them into a single DataFrame if needed:\n",
100
+ "# data = pd.concat(chunks)"
101
+ ]
102
+ },
103
+ {
104
+ "cell_type": "code",
105
+ "execution_count": 28,
106
+ "metadata": {},
107
+ "outputs": [
108
+ {
109
+ "data": {
110
+ "text/html": [
111
+ "<div>\n",
112
+ "<style scoped>\n",
113
+ " .dataframe tbody tr th:only-of-type {\n",
114
+ " vertical-align: middle;\n",
115
+ " }\n",
116
+ "\n",
117
+ " .dataframe tbody tr th {\n",
118
+ " vertical-align: top;\n",
119
+ " }\n",
120
+ "\n",
121
+ " .dataframe thead th {\n",
122
+ " text-align: right;\n",
123
+ " }\n",
124
+ "</style>\n",
125
+ "<table border=\"1\" class=\"dataframe\">\n",
126
+ " <thead>\n",
127
+ " <tr style=\"text-align: right;\">\n",
128
+ " <th></th>\n",
129
+ " <th>id</th>\n",
130
+ " <th>submitter</th>\n",
131
+ " <th>authors</th>\n",
132
+ " <th>title</th>\n",
133
+ " <th>comments</th>\n",
134
+ " <th>journal-ref</th>\n",
135
+ " <th>doi</th>\n",
136
+ " <th>report-no</th>\n",
137
+ " <th>categories</th>\n",
138
+ " <th>license</th>\n",
139
+ " <th>abstract</th>\n",
140
+ " <th>versions</th>\n",
141
+ " <th>update_date</th>\n",
142
+ " <th>authors_parsed</th>\n",
143
+ " <th>embedding</th>\n",
144
+ " </tr>\n",
145
+ " </thead>\n",
146
+ " <tbody>\n",
147
+ " <tr>\n",
148
+ " <th>80000</th>\n",
149
+ " <td>1906.05546</td>\n",
150
+ " <td>Da Sun Handason Tam</td>\n",
151
+ " <td>Da Sun Handason Tam, Wing Cheong Lau, Bin Hu, ...</td>\n",
152
+ " <td>Identifying Illicit Accounts in Large Scale E-...</td>\n",
153
+ " <td>None</td>\n",
154
+ " <td>None</td>\n",
155
+ " <td>None</td>\n",
156
+ " <td>None</td>\n",
157
+ " <td>cs.SI cs.LG</td>\n",
158
+ " <td>http://arxiv.org/licenses/nonexclusive-distrib...</td>\n",
159
+ " <td>Rapid and massive adoption of mobile/ online...</td>\n",
160
+ " <td>[{'version': 'v1', 'created': 'Thu, 13 Jun 201...</td>\n",
161
+ " <td>2019-06-14</td>\n",
162
+ " <td>[[Tam, Da Sun Handason, ], [Lau, Wing Cheong, ...</td>\n",
163
+ " <td>[-0.005185681860893, 0.00532205728814, 0.01307...</td>\n",
164
+ " </tr>\n",
165
+ " <tr>\n",
166
+ " <th>80001</th>\n",
167
+ " <td>1906.05551</td>\n",
168
+ " <td>Kai Fan Dr</td>\n",
169
+ " <td>Pei Zhang, Boxing Chen, Niyu Ge, Kai Fan</td>\n",
170
+ " <td>Lattice Transformer for Speech Translation</td>\n",
171
+ " <td>accepted to ACL 2019</td>\n",
172
+ " <td>None</td>\n",
173
+ " <td>None</td>\n",
174
+ " <td>None</td>\n",
175
+ " <td>cs.CL</td>\n",
176
+ " <td>http://arxiv.org/licenses/nonexclusive-distrib...</td>\n",
177
+ " <td>Recent advances in sequence modeling have hi...</td>\n",
178
+ " <td>[{'version': 'v1', 'created': 'Thu, 13 Jun 201...</td>\n",
179
+ " <td>2019-06-14</td>\n",
180
+ " <td>[[Zhang, Pei, ], [Chen, Boxing, ], [Ge, Niyu, ...</td>\n",
181
+ " <td>[-0.0306410882622, 0.004218348767608, 0.018301...</td>\n",
182
+ " </tr>\n",
183
+ " <tr>\n",
184
+ " <th>80002</th>\n",
185
+ " <td>1906.05560</td>\n",
186
+ " <td>Hung-Hsuan Chen</td>\n",
187
+ " <td>Yu-Wei Kao and Hung-Hsuan Chen</td>\n",
188
+ " <td>Associated Learning: Decomposing End-to-end Ba...</td>\n",
189
+ " <td>34 pages, 6 figures, 7 tables</td>\n",
190
+ " <td>MIT Neural Computation 33(1), 2021</td>\n",
191
+ " <td>None</td>\n",
192
+ " <td>None</td>\n",
193
+ " <td>cs.NE cs.LG stat.ML</td>\n",
194
+ " <td>http://arxiv.org/licenses/nonexclusive-distrib...</td>\n",
195
+ " <td>Backpropagation (BP) is the cornerstone of t...</td>\n",
196
+ " <td>[{'version': 'v1', 'created': 'Thu, 13 Jun 201...</td>\n",
197
+ " <td>2021-02-10</td>\n",
198
+ " <td>[[Kao, Yu-Wei, ], [Chen, Hung-Hsuan, ]]</td>\n",
199
+ " <td>[-0.030108174309134, 0.014727415516972, 0.0341...</td>\n",
200
+ " </tr>\n",
201
+ " <tr>\n",
202
+ " <th>80003</th>\n",
203
+ " <td>1906.05571</td>\n",
204
+ " <td>Ting Yao</td>\n",
205
+ " <td>Zhaofan Qiu and Ting Yao and Chong-Wah Ngo and...</td>\n",
206
+ " <td>Learning Spatio-Temporal Representation with L...</td>\n",
207
+ " <td>CVPR 2019</td>\n",
208
+ " <td>None</td>\n",
209
+ " <td>None</td>\n",
210
+ " <td>None</td>\n",
211
+ " <td>cs.CV</td>\n",
212
+ " <td>http://arxiv.org/licenses/nonexclusive-distrib...</td>\n",
213
+ " <td>Convolutional Neural Networks (CNN) have bee...</td>\n",
214
+ " <td>[{'version': 'v1', 'created': 'Thu, 13 Jun 201...</td>\n",
215
+ " <td>2019-06-14</td>\n",
216
+ " <td>[[Qiu, Zhaofan, ], [Yao, Ting, ], [Ngo, Chong-...</td>\n",
217
+ " <td>[-0.015157531015574, 0.035704407840967005, 0.0...</td>\n",
218
+ " </tr>\n",
219
+ " <tr>\n",
220
+ " <th>80004</th>\n",
221
+ " <td>1906.05572</td>\n",
222
+ " <td>Wenquan Wu</td>\n",
223
+ " <td>Wenquan Wu, Zhen Guo, Xiangyang Zhou, Hua Wu, ...</td>\n",
224
+ " <td>Proactive Human-Machine Conversation with Expl...</td>\n",
225
+ " <td>Accepted by ACL 2019</td>\n",
226
+ " <td>None</td>\n",
227
+ " <td>None</td>\n",
228
+ " <td>None</td>\n",
229
+ " <td>cs.CL</td>\n",
230
+ " <td>http://arxiv.org/licenses/nonexclusive-distrib...</td>\n",
231
+ " <td>Though great progress has been made for huma...</td>\n",
232
+ " <td>[{'version': 'v1', 'created': 'Thu, 13 Jun 201...</td>\n",
233
+ " <td>2019-11-11</td>\n",
234
+ " <td>[[Wu, Wenquan, ], [Guo, Zhen, ], [Zhou, Xiangy...</td>\n",
235
+ " <td>[-0.020636107772588, -0.017156293615698003, 0....</td>\n",
236
+ " </tr>\n",
237
+ " <tr>\n",
238
+ " <th>...</th>\n",
239
+ " <td>...</td>\n",
240
+ " <td>...</td>\n",
241
+ " <td>...</td>\n",
242
+ " <td>...</td>\n",
243
+ " <td>...</td>\n",
244
+ " <td>...</td>\n",
245
+ " <td>...</td>\n",
246
+ " <td>...</td>\n",
247
+ " <td>...</td>\n",
248
+ " <td>...</td>\n",
249
+ " <td>...</td>\n",
250
+ " <td>...</td>\n",
251
+ " <td>...</td>\n",
252
+ " <td>...</td>\n",
253
+ " <td>...</td>\n",
254
+ " </tr>\n",
255
+ " <tr>\n",
256
+ " <th>89995</th>\n",
257
+ " <td>1909.12898</td>\n",
258
+ " <td>Mahsa Ghasemi</td>\n",
259
+ " <td>Mahsa Ghasemi, Abolfazl Hashemi, Haris Vikalo,...</td>\n",
260
+ " <td>Identifying Sparse Low-Dimensional Structures ...</td>\n",
261
+ " <td>Accepted for publication in American Control C...</td>\n",
262
+ " <td>None</td>\n",
263
+ " <td>None</td>\n",
264
+ " <td>None</td>\n",
265
+ " <td>cs.LG cs.SY eess.SY stat.ML</td>\n",
266
+ " <td>http://arxiv.org/licenses/nonexclusive-distrib...</td>\n",
267
+ " <td>We consider the problem of learning low-dime...</td>\n",
268
+ " <td>[{'version': 'v1', 'created': 'Fri, 27 Sep 201...</td>\n",
269
+ " <td>2020-04-09</td>\n",
270
+ " <td>[[Ghasemi, Mahsa, ], [Hashemi, Abolfazl, ], [V...</td>\n",
271
+ " <td>[-0.015149267390370001, 0.020566524937748, 0.0...</td>\n",
272
+ " </tr>\n",
273
+ " <tr>\n",
274
+ " <th>89996</th>\n",
275
+ " <td>1909.12901</td>\n",
276
+ " <td>Feifan Wang</td>\n",
277
+ " <td>Feifan Wang, Runzhou Jiang, Liqin Zheng, Chun ...</td>\n",
278
+ " <td>3D U-Net Based Brain Tumor Segmentation and Su...</td>\n",
279
+ " <td>Third place award of the 2019 MICCAI BraTS cha...</td>\n",
280
+ " <td>None</td>\n",
281
+ " <td>10.1007/978-3-030-46640-4_13</td>\n",
282
+ " <td>None</td>\n",
283
+ " <td>eess.IV cs.CV</td>\n",
284
+ " <td>http://arxiv.org/licenses/nonexclusive-distrib...</td>\n",
285
+ " <td>Past few years have witnessed the prevalence...</td>\n",
286
+ " <td>[{'version': 'v1', 'created': 'Sun, 15 Sep 201...</td>\n",
287
+ " <td>2020-05-26</td>\n",
288
+ " <td>[[Wang, Feifan, ], [Jiang, Runzhou, ], [Zheng,...</td>\n",
289
+ " <td>[0.0012591709382830001, 0.003147927578538, 0.0...</td>\n",
290
+ " </tr>\n",
291
+ " <tr>\n",
292
+ " <th>89997</th>\n",
293
+ " <td>1909.12902</td>\n",
294
+ " <td>Denys Dutykh</td>\n",
295
+ " <td>Beno\\^it Colange and Laurent Vuillon and Sylva...</td>\n",
296
+ " <td>Interpreting Distortions in Dimensionality Red...</td>\n",
297
+ " <td>5 pages, 6 figures, 22 references. Paper prese...</td>\n",
298
+ " <td>Paper presented at IEEE Vis 2019 conference at...</td>\n",
299
+ " <td>10.1109/VISUAL.2019.8933568</td>\n",
300
+ " <td>None</td>\n",
301
+ " <td>cs.CV cs.IR cs.LG</td>\n",
302
+ " <td>http://creativecommons.org/licenses/by-nc-sa/4.0/</td>\n",
303
+ " <td>To perform visual data exploration, many dim...</td>\n",
304
+ " <td>[{'version': 'v1', 'created': 'Fri, 20 Sep 201...</td>\n",
305
+ " <td>2020-02-20</td>\n",
306
+ " <td>[[Colange, Benoît, ], [Vuillon, Laurent, ], [L...</td>\n",
307
+ " <td>[-0.009024421684443, 0.018310621380805, 0.0397...</td>\n",
308
+ " </tr>\n",
309
+ " <tr>\n",
310
+ " <th>89998</th>\n",
311
+ " <td>1909.12903</td>\n",
312
+ " <td>Shupeng Gui</td>\n",
313
+ " <td>Shupeng Gui, Xiangliang Zhang, Pan Zhong, Shua...</td>\n",
314
+ " <td>PINE: Universal Deep Embedding for Graph Nodes...</td>\n",
315
+ " <td>24 pages, 4 figures, 3 tables. arXiv admin not...</td>\n",
316
+ " <td>None</td>\n",
317
+ " <td>None</td>\n",
318
+ " <td>None</td>\n",
319
+ " <td>cs.LG stat.ML</td>\n",
320
+ " <td>http://arxiv.org/licenses/nonexclusive-distrib...</td>\n",
321
+ " <td>Graph node embedding aims at learning a vect...</td>\n",
322
+ " <td>[{'version': 'v1', 'created': 'Wed, 25 Sep 201...</td>\n",
323
+ " <td>2019-10-01</td>\n",
324
+ " <td>[[Gui, Shupeng, ], [Zhang, Xiangliang, ], [Zho...</td>\n",
325
+ " <td>[0.003639858681708, -0.005150159355252, 0.0067...</td>\n",
326
+ " </tr>\n",
327
+ " <tr>\n",
328
+ " <th>89999</th>\n",
329
+ " <td>1909.12906</td>\n",
330
+ " <td>Karol Arndt</td>\n",
331
+ " <td>Karol Arndt, Murtaza Hazara, Ali Ghadirzadeh, ...</td>\n",
332
+ " <td>Meta Reinforcement Learning for Sim-to-real Do...</td>\n",
333
+ " <td>Submitted to ICRA 2020</td>\n",
334
+ " <td>None</td>\n",
335
+ " <td>None</td>\n",
336
+ " <td>None</td>\n",
337
+ " <td>cs.CV cs.RO</td>\n",
338
+ " <td>http://creativecommons.org/licenses/by/4.0/</td>\n",
339
+ " <td>Modern reinforcement learning methods suffer...</td>\n",
340
+ " <td>[{'version': 'v1', 'created': 'Mon, 16 Sep 201...</td>\n",
341
+ " <td>2019-10-01</td>\n",
342
+ " <td>[[Arndt, Karol, ], [Hazara, Murtaza, ], [Ghadi...</td>\n",
343
+ " <td>[0.0035310059320180004, -0.009807205758988, 0....</td>\n",
344
+ " </tr>\n",
345
+ " </tbody>\n",
346
+ "</table>\n",
347
+ "<p>10000 rows × 15 columns</p>\n",
348
+ "</div>"
349
+ ],
350
+ "text/plain": [
351
+ " id submitter \\\n",
352
+ "80000 1906.05546 Da Sun Handason Tam \n",
353
+ "80001 1906.05551 Kai Fan Dr \n",
354
+ "80002 1906.05560 Hung-Hsuan Chen \n",
355
+ "80003 1906.05571 Ting Yao \n",
356
+ "80004 1906.05572 Wenquan Wu \n",
357
+ "... ... ... \n",
358
+ "89995 1909.12898 Mahsa Ghasemi \n",
359
+ "89996 1909.12901 Feifan Wang \n",
360
+ "89997 1909.12902 Denys Dutykh \n",
361
+ "89998 1909.12903 Shupeng Gui \n",
362
+ "89999 1909.12906 Karol Arndt \n",
363
+ "\n",
364
+ " authors \\\n",
365
+ "80000 Da Sun Handason Tam, Wing Cheong Lau, Bin Hu, ... \n",
366
+ "80001 Pei Zhang, Boxing Chen, Niyu Ge, Kai Fan \n",
367
+ "80002 Yu-Wei Kao and Hung-Hsuan Chen \n",
368
+ "80003 Zhaofan Qiu and Ting Yao and Chong-Wah Ngo and... \n",
369
+ "80004 Wenquan Wu, Zhen Guo, Xiangyang Zhou, Hua Wu, ... \n",
370
+ "... ... \n",
371
+ "89995 Mahsa Ghasemi, Abolfazl Hashemi, Haris Vikalo,... \n",
372
+ "89996 Feifan Wang, Runzhou Jiang, Liqin Zheng, Chun ... \n",
373
+ "89997 Beno\\^it Colange and Laurent Vuillon and Sylva... \n",
374
+ "89998 Shupeng Gui, Xiangliang Zhang, Pan Zhong, Shua... \n",
375
+ "89999 Karol Arndt, Murtaza Hazara, Ali Ghadirzadeh, ... \n",
376
+ "\n",
377
+ " title \\\n",
378
+ "80000 Identifying Illicit Accounts in Large Scale E-... \n",
379
+ "80001 Lattice Transformer for Speech Translation \n",
380
+ "80002 Associated Learning: Decomposing End-to-end Ba... \n",
381
+ "80003 Learning Spatio-Temporal Representation with L... \n",
382
+ "80004 Proactive Human-Machine Conversation with Expl... \n",
383
+ "... ... \n",
384
+ "89995 Identifying Sparse Low-Dimensional Structures ... \n",
385
+ "89996 3D U-Net Based Brain Tumor Segmentation and Su... \n",
386
+ "89997 Interpreting Distortions in Dimensionality Red... \n",
387
+ "89998 PINE: Universal Deep Embedding for Graph Nodes... \n",
388
+ "89999 Meta Reinforcement Learning for Sim-to-real Do... \n",
389
+ "\n",
390
+ " comments \\\n",
391
+ "80000 None \n",
392
+ "80001 accepted to ACL 2019 \n",
393
+ "80002 34 pages, 6 figures, 7 tables \n",
394
+ "80003 CVPR 2019 \n",
395
+ "80004 Accepted by ACL 2019 \n",
396
+ "... ... \n",
397
+ "89995 Accepted for publication in American Control C... \n",
398
+ "89996 Third place award of the 2019 MICCAI BraTS cha... \n",
399
+ "89997 5 pages, 6 figures, 22 references. Paper prese... \n",
400
+ "89998 24 pages, 4 figures, 3 tables. arXiv admin not... \n",
401
+ "89999 Submitted to ICRA 2020 \n",
402
+ "\n",
403
+ " journal-ref \\\n",
404
+ "80000 None \n",
405
+ "80001 None \n",
406
+ "80002 MIT Neural Computation 33(1), 2021 \n",
407
+ "80003 None \n",
408
+ "80004 None \n",
409
+ "... ... \n",
410
+ "89995 None \n",
411
+ "89996 None \n",
412
+ "89997 Paper presented at IEEE Vis 2019 conference at... \n",
413
+ "89998 None \n",
414
+ "89999 None \n",
415
+ "\n",
416
+ " doi report-no categories \\\n",
417
+ "80000 None None cs.SI cs.LG \n",
418
+ "80001 None None cs.CL \n",
419
+ "80002 None None cs.NE cs.LG stat.ML \n",
420
+ "80003 None None cs.CV \n",
421
+ "80004 None None cs.CL \n",
422
+ "... ... ... ... \n",
423
+ "89995 None None cs.LG cs.SY eess.SY stat.ML \n",
424
+ "89996 10.1007/978-3-030-46640-4_13 None eess.IV cs.CV \n",
425
+ "89997 10.1109/VISUAL.2019.8933568 None cs.CV cs.IR cs.LG \n",
426
+ "89998 None None cs.LG stat.ML \n",
427
+ "89999 None None cs.CV cs.RO \n",
428
+ "\n",
429
+ " license \\\n",
430
+ "80000 http://arxiv.org/licenses/nonexclusive-distrib... \n",
431
+ "80001 http://arxiv.org/licenses/nonexclusive-distrib... \n",
432
+ "80002 http://arxiv.org/licenses/nonexclusive-distrib... \n",
433
+ "80003 http://arxiv.org/licenses/nonexclusive-distrib... \n",
434
+ "80004 http://arxiv.org/licenses/nonexclusive-distrib... \n",
435
+ "... ... \n",
436
+ "89995 http://arxiv.org/licenses/nonexclusive-distrib... \n",
437
+ "89996 http://arxiv.org/licenses/nonexclusive-distrib... \n",
438
+ "89997 http://creativecommons.org/licenses/by-nc-sa/4.0/ \n",
439
+ "89998 http://arxiv.org/licenses/nonexclusive-distrib... \n",
440
+ "89999 http://creativecommons.org/licenses/by/4.0/ \n",
441
+ "\n",
442
+ " abstract \\\n",
443
+ "80000 Rapid and massive adoption of mobile/ online... \n",
444
+ "80001 Recent advances in sequence modeling have hi... \n",
445
+ "80002 Backpropagation (BP) is the cornerstone of t... \n",
446
+ "80003 Convolutional Neural Networks (CNN) have bee... \n",
447
+ "80004 Though great progress has been made for huma... \n",
448
+ "... ... \n",
449
+ "89995 We consider the problem of learning low-dime... \n",
450
+ "89996 Past few years have witnessed the prevalence... \n",
451
+ "89997 To perform visual data exploration, many dim... \n",
452
+ "89998 Graph node embedding aims at learning a vect... \n",
453
+ "89999 Modern reinforcement learning methods suffer... \n",
454
+ "\n",
455
+ " versions update_date \\\n",
456
+ "80000 [{'version': 'v1', 'created': 'Thu, 13 Jun 201... 2019-06-14 \n",
457
+ "80001 [{'version': 'v1', 'created': 'Thu, 13 Jun 201... 2019-06-14 \n",
458
+ "80002 [{'version': 'v1', 'created': 'Thu, 13 Jun 201... 2021-02-10 \n",
459
+ "80003 [{'version': 'v1', 'created': 'Thu, 13 Jun 201... 2019-06-14 \n",
460
+ "80004 [{'version': 'v1', 'created': 'Thu, 13 Jun 201... 2019-11-11 \n",
461
+ "... ... ... \n",
462
+ "89995 [{'version': 'v1', 'created': 'Fri, 27 Sep 201... 2020-04-09 \n",
463
+ "89996 [{'version': 'v1', 'created': 'Sun, 15 Sep 201... 2020-05-26 \n",
464
+ "89997 [{'version': 'v1', 'created': 'Fri, 20 Sep 201... 2020-02-20 \n",
465
+ "89998 [{'version': 'v1', 'created': 'Wed, 25 Sep 201... 2019-10-01 \n",
466
+ "89999 [{'version': 'v1', 'created': 'Mon, 16 Sep 201... 2019-10-01 \n",
467
+ "\n",
468
+ " authors_parsed \\\n",
469
+ "80000 [[Tam, Da Sun Handason, ], [Lau, Wing Cheong, ... \n",
470
+ "80001 [[Zhang, Pei, ], [Chen, Boxing, ], [Ge, Niyu, ... \n",
471
+ "80002 [[Kao, Yu-Wei, ], [Chen, Hung-Hsuan, ]] \n",
472
+ "80003 [[Qiu, Zhaofan, ], [Yao, Ting, ], [Ngo, Chong-... \n",
473
+ "80004 [[Wu, Wenquan, ], [Guo, Zhen, ], [Zhou, Xiangy... \n",
474
+ "... ... \n",
475
+ "89995 [[Ghasemi, Mahsa, ], [Hashemi, Abolfazl, ], [V... \n",
476
+ "89996 [[Wang, Feifan, ], [Jiang, Runzhou, ], [Zheng,... \n",
477
+ "89997 [[Colange, Benoît, ], [Vuillon, Laurent, ], [L... \n",
478
+ "89998 [[Gui, Shupeng, ], [Zhang, Xiangliang, ], [Zho... \n",
479
+ "89999 [[Arndt, Karol, ], [Hazara, Murtaza, ], [Ghadi... \n",
480
+ "\n",
481
+ " embedding \n",
482
+ "80000 [-0.005185681860893, 0.00532205728814, 0.01307... \n",
483
+ "80001 [-0.0306410882622, 0.004218348767608, 0.018301... \n",
484
+ "80002 [-0.030108174309134, 0.014727415516972, 0.0341... \n",
485
+ "80003 [-0.015157531015574, 0.035704407840967005, 0.0... \n",
486
+ "80004 [-0.020636107772588, -0.017156293615698003, 0.... \n",
487
+ "... ... \n",
488
+ "89995 [-0.015149267390370001, 0.020566524937748, 0.0... \n",
489
+ "89996 [0.0012591709382830001, 0.003147927578538, 0.0... \n",
490
+ "89997 [-0.009024421684443, 0.018310621380805, 0.0397... \n",
491
+ "89998 [0.003639858681708, -0.005150159355252, 0.0067... \n",
492
+ "89999 [0.0035310059320180004, -0.009807205758988, 0.... \n",
493
+ "\n",
494
+ "[10000 rows x 15 columns]"
495
+ ]
496
+ },
497
+ "execution_count": 28,
498
+ "metadata": {},
499
+ "output_type": "execute_result"
500
+ }
501
+ ],
502
+ "source": [
503
+ "chunks[8]"
504
+ ]
505
+ },
506
+ {
507
+ "cell_type": "code",
508
+ "execution_count": 29,
509
+ "metadata": {},
510
+ "outputs": [],
511
+ "source": [
512
+ "new_data = []\n",
513
+ "for p in chunks:\n",
514
+ " temp = p[[\"id\",\"title\",\"embedding\"]]\n",
515
+ " new_data.append(temp)"
516
+ ]
517
+ },
518
+ {
519
+ "cell_type": "code",
520
+ "execution_count": 30,
521
+ "metadata": {},
522
+ "outputs": [],
523
+ "source": [
524
+ "for i, df in enumerate(new_data):\n",
525
+ " df.to_csv(f\"arxiv_{i}.csv\", index=False)"
526
+ ]
527
+ }
528
+ ],
529
+ "metadata": {
530
+ "kernelspec": {
531
+ "display_name": "Python 3",
532
+ "language": "python",
533
+ "name": "python3"
534
+ },
535
+ "language_info": {
536
+ "codemirror_mode": {
537
+ "name": "ipython",
538
+ "version": 3
539
+ },
540
+ "file_extension": ".py",
541
+ "mimetype": "text/x-python",
542
+ "name": "python",
543
+ "nbconvert_exporter": "python",
544
+ "pygments_lexer": "ipython3",
545
+ "version": "3.9.16"
546
+ }
547
+ },
548
+ "nbformat": 4,
549
+ "nbformat_minor": 2
550
+ }