#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Tiago de Paula Peixoto <tiago@skewed.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from contextlib import ExitStack

from .. import *

title = "arXiv citation networks (1993-2003)"
description = "Citations among papers posted on arxiv.org under the hep-ph and hep-th categories, between 1993 and 2003. This period begins a few months after arXiv was launched. If a paper i cites a paper j that is also in this data set, a directed edge connects i to j. (Papers not in the data set are excluded.) These data were originally released as part of the 2003 KDD Cup."
tags = ['Informational', 'Citation', 'Unweighted']
url = 'http://snap.stanford.edu/data/cit-HepPh.html'
citation = [('J. Gehrke, P. Ginsparg, J. M. Kleinberg. "Overview of the 2003 KDD Cup." SIGKDD Explorations 5(2), 149-151 (2003)', 'http://www.cs.cmu.edu/~jure/pubs/powergrowth-kdd05.pdf')]
icon_hash = '571512be6afce8e9253875e8'
upstream_license = None
upstream_prefix = 'https://snap.stanford.edu/data'
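# Upstream files for each network: (file names, network name, parser format).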
files = [(("cit-HepPh.txt.gz", "cit-HepPh-dates.txt.gz"), "HepPh", "snap"),
         (("cit-HepTh.txt.gz", "cit-HepTh-dates.txt.gz"), "HepTh", "snap")]

def fetch_upstream(force=False):
    return fetch_upstream_files(__name__.split(".")[-1], upstream_prefix, files,
                                force)
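
# A minimal usage sketch (assumptions: this module is imported through the
# parent dataset package, e.g. as a hypothetical `cit_hep` module, and
# fetch_upstream_files skips files that are already cached unless forced):
#
#     cit_hep.fetch_upstream()            # download the four .txt.gz files
#     cit_hep.fetch_upstream(force=True)  # re-download even if already cached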

@cache_network()
@coerce_props()
@annotate()
def parse(alts=None):
    global files
    name = __name__.split(".")[-1]
    for fnames, alt, fmt in files:
        if alts is not None and alt not in alts:
            continue
        if isinstance(fnames, str):
            fnames = [fnames]
        with ExitStack() as stack:
            # Open the edge list and the corresponding dates file for this network.
            fs = [stack.enter_context(open_upstream_file(name, fn, "rb")) for fn in fnames]
            # Build the directed citation graph; vertex names are the paper ids
            # used in the upstream files.
            g = parse_graph(fs, (fmt, dict(hashed=True)), directed=True)
            vs = {g.vp.name[v] : v for v in g.vertices()}
            # Attach each paper's submission date as a vertex property.
            g.vp.date = g.new_vp("string")
            next(fs[1])  # skip the header line of the dates file
            for line in fs[1]:
                vals = line.split()
                try:
                    v = vs[int(vals[0])]
                except KeyError:
                    # Paper listed in the dates file but absent from the graph.
                    continue
                g.vp.date[v] = vals[1]
        yield alt, g
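
# A minimal usage sketch (assumptions: the surrounding package exposes this
# module as a hypothetical `cit_hep`, the upstream files have already been
# fetched, and the decorators preserve the (alt, graph) iteration). parse()
# yields one (alt, graph) pair per entry in `files`:
#
#     graphs = dict(cit_hep.parse())          # {"HepPh": <Graph>, "HepTh": <Graph>}
#     g = graphs["HepPh"]
#     print(g.num_vertices(), g.num_edges())  # size of the citation network
#     v = g.vertex(0)
#     print(g.vp.name[v], g.vp.date[v])       # paper id and submission date (may be empty)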