
commit message

Lucas Gautheron 2 months ago
commit 5b59119d05
100 changed files with 601874 additions and 0 deletions
  1. .gitattributes (+3, -0)
  2. .gitignore (+60, -0)
  3. .gitmodules (+5, -0)
  4. LICENSE (+8, -0)
  5. README.md (+2, -0)
  6. acl/acl.parquet (+1, -0)
  7. acl/articles.parquet (+1, -0)
  8. acl/articles_authors.parquet (+1, -0)
  9. acl/export.py (+19, -0)
  10. code/authors_sociality.py (+131, -0)
  11. code/change.stan (+95, -0)
  12. code/comparative_analysis.py (+508, -0)
  13. code/disruption.stan (+97, -0)
  14. code/ei_cov_softmax_control_nu.stan (+160, -0)
  15. code/ei_map.py (+277, -0)
  16. code/entered.stan (+89, -0)
  17. code/etm.py (+427, -0)
  18. code/etm_compile.py (+103, -0)
  19. code/etm_ei.py (+85, -0)
  20. code/etm_transfers.py (+169, -0)
  21. code/exited.stan (+89, -0)
  22. code/optimal_transport.py (+421, -0)
  23. code/topic_distance.py (+72, -0)
  24. inspire-harvest (+1, -0)
  25. output/acl_2002_2022/aggregate.csv (+579, -0)
  26. output/acl_2002_2022/articles.csv (+58290, -0)
  27. output/acl_2002_2022/authors_full_records.pickle (+1, -0)
  28. output/acl_2002_2022/brokerage.csv (+10825, -0)
  29. output/acl_2002_2022/cost_vs_nu_knowledge.eps (+13474, -0)
  30. output/acl_2002_2022/dataset.pickle (+1, -0)
  31. output/acl_2002_2022/ei_samples_control_nu.npz (+1, -0)
  32. output/acl_2002_2022/embeddings.bin (+1, -0)
  33. output/acl_2002_2022/etm_instance.pickle (+1, -0)
  34. output/acl_2002_2022/keywords.npy (+1, -0)
  35. output/acl_2002_2022/largest_transfers.tex (+20, -0)
  36. output/acl_2002_2022/model (+1, -0)
  37. output/acl_2002_2022/most_conservative.tex (+20, -0)
  38. output/acl_2002_2022/ngrams.csv (+5967, -0)
  39. output/acl_2002_2022/nu_expertise.npy (+1, -0)
  40. output/acl_2002_2022/nu_expertise_symmetric.npy (+1, -0)
  41. output/acl_2002_2022/nu_ling.npy (+1, -0)
  42. output/acl_2002_2022/nu_ling_symmetric.npy (+1, -0)
  43. output/acl_2002_2022/params.yml (+19, -0)
  44. output/acl_2002_2022/pooled_resources.parquet (+1, -0)
  45. output/acl_2002_2022/sankey_control_nu.pdf (+1, -0)
  46. output/acl_2002_2022/sankey_control_nu_acl.pdf (+1, -0)
  47. output/acl_2002_2022/scores.npy (+1, -0)
  48. output/acl_2002_2022/topic_citation_matrix.eps (+11110, -0)
  49. output/acl_2002_2022/topics.csv (+21, -0)
  50. output/acl_2002_2022/topics_counts.npy (+1, -0)
  51. output/acl_2002_2022/topics_order.npy (+1, -0)
  52. output/authors_brokerage.csv (+2196, -0)
  53. output/authors_centrality.csv (+1931, -0)
  54. output/etm_20_pretrained/.DS_Store (+1, -0)
  55. output/etm_20_pretrained/age_vs_position.png (+1, -0)
  56. output/etm_20_pretrained/aggregate.csv (+2196, -0)
  57. output/etm_20_pretrained/articles.csv (+186163, -0)
  58. output/etm_20_pretrained/authors_full_records.pickle (+1, -0)
  59. output/etm_20_pretrained/authors_keywords_record.npz (+1, -0)
  60. output/etm_20_pretrained/brokerage.csv (+23547, -0)
  61. output/etm_20_pretrained/capital_measures.csv (+6, -0)
  62. output/etm_20_pretrained/capital_measures.eps (+9518, -0)
  63. output/etm_20_pretrained/change_score.eps (+9431, -0)
  64. output/etm_20_pretrained/change_score_effects_entropy_brokerage.eps (+10043, -0)
  65. output/etm_20_pretrained/change_score_effects_entropy_magnitude.eps (+10048, -0)
  66. output/etm_20_pretrained/cost_delta.eps (+10599, -0)
  67. output/etm_20_pretrained/cost_delta_identity.eps (+10597, -0)
  68. output/etm_20_pretrained/cost_delta_knowledge.eps (+10549, -0)
  69. output/etm_20_pretrained/cost_matrix_counterfactual_couplings_knowledge_bounded.eps (+10998, -0)
  70. output/etm_20_pretrained/cost_matrix_knowledge_bounded.eps (+10588, -0)
  71. output/etm_20_pretrained/cost_matrix_predicted_couplings_knowledge_bounded.eps (+10902, -0)
  72. output/etm_20_pretrained/cost_matrix_true_couplings_knowledge_bounded.eps (+11002, -0)
  73. output/etm_20_pretrained/cost_vs_nu.eps (+14773, -0)
  74. output/etm_20_pretrained/cost_vs_nu_identity.eps (+14789, -0)
  75. output/etm_20_pretrained/cost_vs_nu_knowledge.eps (+14775, -0)
  76. output/etm_20_pretrained/dataset.pickle (+1, -0)
  77. output/etm_20_pretrained/disruption_score.eps (+9184, -0)
  78. output/etm_20_pretrained/disruption_score_effects_entropy_magnitude.eps (+9837, -0)
  79. output/etm_20_pretrained/ei_R_control_nu.eps (+10611, -0)
  80. output/etm_20_pretrained/ei_counts_control_nu.eps (+10616, -0)
  81. output/etm_20_pretrained/ei_delta_control_nu.eps (+11065, -0)
  82. output/etm_20_pretrained/ei_gamma_control_nu.eps (+10766, -0)
  83. output/etm_20_pretrained/ei_mu_control_nu.eps (+10413, -0)
  84. output/etm_20_pretrained/ei_samples_control_nu.npz (+1, -0)
  85. output/etm_20_pretrained/ei_samples_control_nu_crossval.npz (+1, -0)
  86. output/etm_20_pretrained/embeddings.bin (+1, -0)
  87. output/etm_20_pretrained/embeddings.mdl (+1, -0)
  88. output/etm_20_pretrained/entered_score_effects_entropy.eps (+10105, -0)
  89. output/etm_20_pretrained/entropy.eps (+1381, -0)
  90. output/etm_20_pretrained/etm_instance.pickle (+1, -0)
  91. output/etm_20_pretrained/exists.eps (+9109, -0)
  92. output/etm_20_pretrained/exited_score_effects_entropy.eps (+10171, -0)
  93. output/etm_20_pretrained/exited_score_effects_entropy_brokerage.eps (+10211, -0)
  94. output/etm_20_pretrained/exited_score_effects_entropy_magnitude.eps (+10513, -0)
  95. output/etm_20_pretrained/keywords.npy (+1, -0)
  96. output/etm_20_pretrained/keywords_topics.npy (+1, -0)
  97. output/etm_20_pretrained/knowledge_transfers_weights.npz (+1, -0)
  98. output/etm_20_pretrained/low_change.tex (+31, -0)
  99. output/etm_20_pretrained/low_disruption.tex (+31, -0)
  100. output/etm_20_pretrained/low_entered.tex (+0, -0)

+ 3 - 0
.gitattributes

@@ -0,0 +1,3 @@
+* annex.backend=MD5E
+**/.git* annex.largefiles=nothing
+* annex.largefiles=((mimeencoding=binary)and(largerthan=0))

+ 60 - 0
.gitignore

@@ -0,0 +1,60 @@
+# ---> Python
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+

+ 5 - 0
.gitmodules

@@ -0,0 +1,5 @@
+[submodule "inspire-harvest"]
+	path = inspire-harvest
+	url = git@gin.g-node.org:/lucasgautheron/inspire-harvest.git
+	datalad-id = 3f7d09c3-436a-480d-b3b7-7bcebe4cecb4
+	datalad-url = git@gin.g-node.org:/lucasgautheron/inspire-harvest.git

+ 8 - 0
LICENSE

@@ -0,0 +1,8 @@
+MIT License
+Copyright (c) <year> <copyright holders>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 2 - 0
README.md

@@ -0,0 +1,2 @@
+# detecting-innovations
+

+ 1 - 0
acl/acl.parquet

@@ -0,0 +1 @@
+../.git/annex/objects/pW/G3/MD5E-s512856279--693529823b13502e92bc634d21cb71cc/MD5E-s512856279--693529823b13502e92bc634d21cb71cc

+ 1 - 0
acl/articles.parquet

@@ -0,0 +1 @@
+../.git/annex/objects/xp/9Q/MD5E-s827613342--4b47f7ada56248546b8f3ea2b5e27844/MD5E-s827613342--4b47f7ada56248546b8f3ea2b5e27844

+ 1 - 0
acl/articles_authors.parquet

@@ -0,0 +1 @@
+../.git/annex/objects/V3/p9/MD5E-s3370862--86a68ef8bfcccb50f6759feda90b270d/MD5E-s3370862--86a68ef8bfcccb50f6759feda90b270d

+ 19 - 0
acl/export.py

@@ -0,0 +1,19 @@
+import pandas as pd 
+
+df = pd.read_parquet("acl.parquet")
+
+articles = df
+articles["article_id"] = df.index.astype(int)
+articles["date_created"] = articles["year"]
+articles["categories"] = None
+
+articles_authors = articles[["article_id", "acl_id", "author"]]
+articles_authors["bai"] = articles_authors["author"].str.split("and\n").fillna("")
+articles_authors = articles_authors.explode("bai")
+articles_authors["bai"] = articles_authors["bai"].str.lower().str.strip()
+articles_authors["bai"] = articles_authors["bai"].str.extract(r"^((?:.*), [a-z])", expand=False)
+articles_authors = articles_authors[articles_authors["bai"].fillna("").map(len)>1]
+articles_authors = articles_authors[["article_id", "acl_id", "bai"]]
+articles_authors.to_parquet("articles_authors.parquet")
+
+articles.to_parquet("articles.parquet")

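For reference, a minimal sketch (separate from the committed script) of the author-string parsing that acl/export.py applies, on a single invented record; the column names mirror the script, the sample author string is made up:

    import pandas as pd

    # hypothetical toy record mimicking the "author" field format assumed above
    toy = pd.DataFrame({"article_id": [0], "acl_id": ["W02-0000"],
                        "author": ["Doe, John  and\nSmith, Jane"]})
    toy["bai"] = toy["author"].str.split("and\n")
    toy = toy.explode("bai")
    toy["bai"] = toy["bai"].str.lower().str.strip()
    # keep the "surname, first-initial" prefix, as in the export script
    toy["bai"] = toy["bai"].str.extract(r"^((?:.*), [a-z])", expand=False)
    print(toy[["article_id", "bai"]])  # -> "doe, j" and "smith, j"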
+ 131 - 0
code/authors_sociality.py

@@ -0,0 +1,131 @@
+import pandas as pd
+import numpy as np
+import argparse
+
+import networkx as nx
+
+from os.path import join as opj
+
+from itertools import combinations
+
+import pickle
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--input")
+parser.add_argument("--dataset", default="inspire-harvest/database")
+parser.add_argument("--begin", type=int, default=2000)
+parser.add_argument("--end", type=int, default=2009)
+parser.add_argument("--categories", nargs="+", default=[], required=False)
+args = parser.parse_args()
+
+n_topics = len(pd.read_csv(opj(args.input, "topics.csv")))
+
+articles = pd.read_parquet(opj(args.dataset, "articles.parquet"))[
+    ["date_created", "categories", "article_id"]
+]
+articles = articles[articles["date_created"].str.len() >= 4]
+if "year" not in articles.columns:
+    articles["year"] = articles["date_created"].str[:4].astype(int)
+else:
+    articles["year"] = articles["year"].astype(int)
+
+articles = articles[(articles["year"] >= args.begin) & (articles["year"] <= args.end)]
+
+topic_matrix = np.load(opj(args.input, "topics_counts.npy"))
+_articles = pd.read_csv(opj(args.input, "articles.csv"))
+_articles["topics"] = [topic_matrix[i, :] for i in range(len(_articles))]
+articles["article_id"] = articles.article_id.astype(int)
+articles = _articles.merge(articles, how="inner").set_index("article_id")
+
+if len(args.categories):
+    articles = articles[
+        articles.categories.map(lambda l: any([x in l for x in args.categories]))
+    ]
+
+articles_authors = pd.read_parquet(
+   opj(args.dataset, "articles_authors.parquet")
+)
+articles_authors["article_id"] = articles_authors.article_id.astype(int)
+articles_authors = articles_authors[articles_authors["article_id"].isin(articles.index)]
+
+articles_authors_list = articles_authors.groupby("article_id").agg(
+    authors=("bai", lambda l: "||".join(filter(None, l)))
+)
+
+articles = articles.merge(articles_authors_list, left_index=True, right_index=True)
+articles["authors"] = articles["authors"].map(lambda s: s.split("||"))
+
+G = nx.Graph()
+for article_id, authors in articles_authors.groupby("article_id"):
+    if len(authors) >= 50:
+        continue
+
+    for a, b in combinations(authors["bai"].tolist(), 2):
+        if G.has_edge(a, b):
+            G[a][b]["weight"] = max(G[a][b]["weight"], 1 / (len(authors) - 1))
+        else:
+            G.add_edge(a, b, weight=1 / (len(authors) - 1))
+
+# degree = G.degree(weight="weight")
+# degree = {node: value for node, value in degree}
+
+selected_authors = pd.read_csv(opj(args.input, "aggregate.csv"))
+
+N = len(G.nodes)
+brokerage = np.zeros(N)
+degree = np.zeros(N)
+
+for i, bai in enumerate(G.nodes):    
+    co_authors = list(G.neighbors(bai))
+    degree[i] = np.sum([G[bai][x]["weight"] for x in co_authors])
+
+    for x,y in combinations(co_authors, 2):
+        if not G.has_edge(x,y):
+            common_neighbors = set(G.neighbors(x))&set(G.neighbors(y))
+            b = G[bai][x]["weight"]*G[bai][y]["weight"]
+            if len(common_neighbors)<=1:
+                brokerage[i] += b
+
+pd.DataFrame({
+    "bai": list(G.nodes), "brokerage": brokerage, "degree": degree
+}).to_csv(opj(args.input, "brokerage.csv"))
+
+N = len(selected_authors)
+pooled_resources = np.zeros((N, n_topics))
+
+for i, bai in enumerate(selected_authors["bai"].tolist()):    
+    if bai not in G.nodes:
+        continue
+
+    co_authors = list(G.neighbors(bai))
+        
+    for co_author in co_authors:
+        co_author_own_pubs = articles[
+            articles["authors"].apply(lambda l: co_author in l and bai not in l)
+        ]
+
+        if len(co_author_own_pubs) == 0:
+            continue
+
+        co_author_expertise = np.stack(co_author_own_pubs["topics"].fillna(0).values)
+        weight = np.array(1.0 / co_author_own_pubs.authors.map(len))
+        co_author_expertise = co_author_expertise * weight[:, np.newaxis]
+
+        co_author_expertise = (
+            co_author_expertise.sum(axis=0) / co_author_expertise.sum()
+        )
+
+        co_author_expertise = np.nan_to_num(co_author_expertise)
+
+        print(bai, G[bai][co_author]["weight"], len(co_author_own_pubs), co_author_expertise.argmax(), weight.mean())
+
+        pooled_resources[i, :] += G[bai][co_author]["weight"] * co_author_expertise
+
+bai = selected_authors["bai"]
+
+selected_authors["pooled_resources"] = [
+    pooled_resources[i] for i in range(len(selected_authors))
+]
+selected_authors[["bai", "pooled_resources"]].to_parquet(
+    opj(args.input, "pooled_resources.parquet")
+)

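The brokerage loop above counts, for each author, weighted pairs of co-authors who are not otherwise connected (open triads with at most one common neighbour). A toy sketch of the same computation (separate from the committed script), with invented node names and edge weights of the form 1/(n_authors-1):

    import networkx as nx
    from itertools import combinations

    # toy co-authorship graph (invented)
    G = nx.Graph()
    G.add_edge("a", "b", weight=1.0)
    G.add_edge("a", "c", weight=0.5)
    G.add_edge("b", "d", weight=1.0)  # "b" and "c" are not connected -> "a" can broker

    brokerage = {}
    for node in G.nodes:
        score = 0.0
        for x, y in combinations(list(G.neighbors(node)), 2):
            if not G.has_edge(x, y):
                common = set(G.neighbors(x)) & set(G.neighbors(y))
                if len(common) <= 1:  # node is (nearly) the only bridge between x and y
                    score += G[node][x]["weight"] * G[node][y]["weight"]
        brokerage[node] = score

    print(brokerage)  # {'a': 0.5, 'b': 1.0, 'c': 0.0, 'd': 0.0}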
+ 95 - 0
code/change.stan

@@ -0,0 +1,95 @@
+functions {
+    vector z_scale(vector x) {
+        return (x-mean(x))/sd(x);
+    }
+}
+
+data {
+    int<lower=1> N;
+    int<lower=1> K;
+    vector<lower=0>[N] soc_cap;
+    vector<lower=0>[N] soc_div;
+    vector<lower=0>[N] int_div;
+    vector[N] res_soc_div;
+    //vector<lower=0>[N] age;
+    vector<lower=0>[N] m;
+    matrix<lower=0,upper=1>[N,K] x;
+    vector[N] stable;
+    //vector<lower=0>[N] age;
+    array [N] int<lower=0,upper=K-1> primary_research_area;
+}
+
+transformed data {
+    vector[N] z_m = z_scale(m);
+    vector[N] z_soc_cap = z_scale(soc_cap);
+    vector[N] z_soc_div = z_scale(soc_div);
+    vector[N] z_int_div = z_scale(int_div);
+    vector[N] z_res_soc_div = z_scale(res_soc_div);
+}
+
+parameters {
+    real beta_soc_cap;
+    real beta_soc_div;
+    real beta_int_div;
+    real beta_stable;
+    //real beta_age;
+    vector[K] beta_x;
+
+    real mu;
+    real<lower=0> tau;
+    //real<lower=1> sigma;
+    real<lower=0> sigma;
+
+    vector<lower=0,upper=1>[K] mu_x;
+    vector<lower=1>[K] eta;
+    real<lower=0,upper=1> mu_pop;
+    real<lower=1> eta_pop;
+}
+
+model {
+    vector[N] beta_research_area;
+    for (k in 1:N) {
+        beta_research_area[k] = beta_x[primary_research_area[k]+1]*tau;
+    }
+
+    beta_soc_cap ~ normal(0, 1);
+    beta_soc_div ~ normal(0, 1);
+    beta_int_div ~ normal(0, 1);
+    beta_x ~ double_exponential(0, 1);
+    beta_stable ~ normal(0, 1);
+    //beta_age ~ normal(0, 1);
+
+    mu ~ normal(0, 1);
+    tau ~ exponential(1);
+    //sigma ~ pareto(1, 1.5);
+    sigma ~ exponential(1);
+
+    //m ~ beta_proportion(inv_logit(beta_soc_cap*z_soc_cap + beta_soc_div*res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu), sigma);
+    z_m ~ normal(beta_soc_cap*z_soc_cap + beta_soc_div*z_res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu, sigma);
+
+    eta ~ pareto(1, 1.5);
+    mu_x ~ uniform(0, 1);
+    eta_pop ~ pareto(1, 1.5);
+    mu_pop ~ uniform(0, 1);
+    for (k in 1:N) {
+       m[k] ~ beta_proportion(mu_x[primary_research_area[k]+1], eta[primary_research_area[k]+1]);
+    }
+    m ~ beta_proportion(mu_pop, eta_pop);
+}
+
+generated quantities {
+    real R2 = 0;
+    {
+        vector[N] beta_research_area;
+        for (k in 1:N) {
+            beta_research_area[k] = beta_x[primary_research_area[k]+1]*tau;
+        }
+        //vector[N] pred = inv_logit(beta_soc_cap*z_soc_cap + beta_soc_div*res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu);
+        vector[N] pred = beta_soc_cap*z_soc_cap + beta_soc_div*z_res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu;
+
+        R2 = mean(square(z_m-pred))/variance(z_m);
+        R2 = 1-R2;
+    }
+    
+}
+

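Read as a regression, change.stan encodes roughly the following likelihood for the standardized score (my reading of the Stan code above; r(a) is author a's primary research area and z(.) denotes standardization):

    z(m_a) \sim \mathcal{N}\Big(\mu
      + \beta_{\mathrm{soc\,cap}}\, z(\mathrm{soc\_cap}_a)
      + \beta_{\mathrm{soc\,div}}\, z(\mathrm{res\_soc\_div}_a)
      + \beta_{\mathrm{int\,div}}\, z(\mathrm{int\_div}_a)
      + \beta_{\mathrm{stable}}\, \mathrm{stable}_a
      + \tau\, \beta_{x,\, r(a)},\ \sigma\Big)

The beta_proportion statements additionally place per-area and pooled Beta likelihoods on the raw score m, whose location parameters mu_x and mu_pop are compared downstream.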
+ 508 - 0
code/comparative_analysis.py

@@ -0,0 +1,508 @@
+from cProfile import label
+import numpy as np
+import pandas as pd
+from scipy.stats import entropy
+import ot
+from sklearn.linear_model import LinearRegression
+
+from matplotlib import pyplot as plt
+import matplotlib
+matplotlib.use("pgf")
+matplotlib.rcParams.update(
+    {
+        "pgf.texsystem": "xelatex",
+        "font.family": "serif",
+        "font.serif": "Times New Roman",
+        "text.usetex": True,
+        "pgf.rcfonts": False,
+    }
+)
+plt.rcParams["text.latex.preamble"] = "\n".join([
+        r"\usepackage{amsmath}",
+        r"\usepackage{amssymb}",
+])
+
+from textwrap import wrap
+
+import argparse
+from os.path import join as opj, exists
+import pickle
+
+from cmdstanpy import CmdStanModel
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--input")
+parser.add_argument("--suffix", default=None)
+parser.add_argument("--metric", default="change", choices=["change", "disruption", "diversification", "diversification_stirling", "entered", "exited"])
+parser.add_argument("--diversity", default="entropy", choices=["entropy", "stirling"])
+parser.add_argument("--power", choices=["magnitude", "brokerage"], default="magnitude")
+parser.add_argument("--model", default="", choices=["", "bare"])
+parser.add_argument("--compact", action="store_true", default=False)
+args = parser.parse_args()
+
+def institution_stability():
+    if exists(opj(args.input, "institutional_stability.csv")):
+        return pd.read_csv(opj(args.input, "institutional_stability.csv"), index_col="bai")
+    
+    affiliations = pd.read_parquet("../semantics/inspire-harvest/database/affiliations.parquet")
+    affiliations["article_id"] = affiliations.article_id.astype(int)
+
+    articles = pd.read_parquet("../semantics/inspire-harvest/database/articles.parquet")[["article_id", "date_created"]]
+
+    articles = articles[articles["date_created"].str.len() >= 4]
+    articles["year"] = articles["date_created"].str[:4].astype(int) - 2000
+
+    articles["article_id"] = articles.article_id.astype(int)
+    articles = articles[articles["year"] <= 2019 - 2000]
+    articles = articles[articles["year"] >= 0]
+
+    affiliations["article_id"] = affiliations.article_id.astype(int)
+    affiliations = affiliations.merge(articles, how="inner", left_on="article_id", right_on="article_id")
+    affiliations = affiliations[affiliations["bai"].isin(df["bai"])]
+
+    authors_last = affiliations.groupby("bai").agg(last_article=("year", "max"))
+
+    hosts = affiliations.sort_values(["bai", "institution_id", "year"]).groupby(["bai", "institution_id"]).agg(
+        first=("year", "min"),
+        last=("year", "max")
+    )
+    hosts["duration"] = hosts["last"]-hosts["first"]
+    stability = hosts.groupby("bai").agg(stability=("duration", "max"), last=("last", "max"), first=("first", "min"))
+    stability = stability.merge(authors_last, left_index=True, right_index=True)
+    stability["stable"] = stability["stability"]>=(stability["last"]-stability["first"]-1)
+    stability.to_csv(opj(args.input, "institutional_stability.csv"))
+    return stability
+
+suffix = f"_{args.suffix}" if args.suffix is not None else ""
+
+topics = pd.read_csv(opj(args.input, "topics.csv"))
+junk = topics["label"].str.contains("Junk")
+topics = topics[~junk]["label"].tolist()
+
+fig, ax = plt.subplots()
+
+n_topics = len(pd.read_csv(opj(args.input, "topics.csv")))
+df = pd.read_csv(opj(args.input, "aggregate.csv"))
+
+resources = pd.read_parquet(opj(args.input, "pooled_resources.parquet"))
+df = df.merge(resources, left_on="bai", right_on="bai")
+
+NR = np.stack(df[[f"start_{k+1}" for k in range(n_topics)]].values).astype(int)
+NC = np.stack(df[[f"end_{k+1}" for k in range(n_topics)]].values).astype(int)
+expertise = np.stack(df[[f"expertise_{k+1}" for k in range(n_topics)]].values)
+S = np.stack(df["pooled_resources"])
+
+brokerage = pd.read_csv("output/authors_brokerage.csv")
+df = df.merge(brokerage, left_on="bai", right_on="bai")
+
+NR = NR[:,~junk]
+NC = NC[:,~junk]
+expertise = expertise[:,~junk]
+S = S[:,~junk]
+
+x = NR/NR.sum(axis=1)[:,np.newaxis]
+y = NC/NC.sum(axis=1)[:,np.newaxis]
+S_distrib = S/S.sum(axis=1)[:,np.newaxis]
+
+
+# R = np.array([
+#     [((expertise[:,i]>expertise[:,i].mean())&(expertise[:,j]>expertise[:,j].mean())).mean()/((expertise[:,i]>expertise[:,i].mean())|(expertise[:,j]>expertise[:,j].mean())).mean() for j in range(len(topics))]
+#     for i in range(len(topics))
+# ])
+
+R = np.array([
+    [((expertise[:,i]>expertise[:,i].mean())&(expertise[:,j]>expertise[:,j].mean())).mean()/(expertise[:,i]>expertise[:,i].mean()).mean() for j in range(len(topics))]
+    for i in range(len(topics))
+])
+
+change = np.abs(y-x).sum(axis=1)/2
+diversification = (np.exp(entropy(y, axis=1))-np.exp(entropy(x, axis=1)))/x.shape[1]
+x_matrix = np.einsum("ki,kj->kij", x, x)
+y_matrix = np.einsum("ki,kj->kij", y, y)
+x_stirling = 1-np.einsum("ij,kij->k", R, x_matrix)
+y_stirling = 1-np.einsum("ij,kij->k", R, y_matrix)
+
+disruption = np.zeros(len(change))
+for a in range(len(change)):
+    disruption[a] = ot.emd2(x[a,:].copy(order='C'), y[a,:].copy(order='C'), 1-R, processes=4)
+
+alpha = 1
+exited = ((x>alpha*x.mean(axis=0))&(y<alpha*y.mean(axis=0))).sum(axis=1)
+entered = ((x<alpha*x.mean(axis=0))&(y>alpha*y.mean(axis=0))).sum(axis=1)
+
+fig, ax = plt.subplots(figsize=[6.4, 3.2])
+ax.hist(change, bins=np.linspace(0,1,50), histtype="step")
+ax.set_xlabel(f"Change score $c_a = \\frac{{1}}{{2}}\\sum_k |y_{{ak}}-x_{{ak}}|$")
+ax.set_ylabel("\\# of scientists")
+fig.savefig(opj(args.input, "change_score.eps"), bbox_inches="tight")
+
+print("change 50% interval: ", np.quantile(change,q=0.25), np.quantile(change,q=1-0.25))
+
+fig, ax = plt.subplots(figsize=[6.4, 3.2])
+ax.hist(diversification, bins=np.linspace(-0.5,0.5,50), histtype="step")
+ax.set_xlabel(f"Diversification score $\\Delta_a$")
+ax.set_ylabel("\\# of scientists")
+fig.savefig(opj(args.input, "diversification_score.eps"), bbox_inches="tight")
+
+fig, ax = plt.subplots()
+ax.hist(disruption, bins=np.linspace(0,1,50), histtype="step")
+ax.set_xlabel(f"Disruption score $d_a$")
+ax.set_ylabel("\\# of scientists")
+fig.savefig(opj(args.input, "disruption_score.eps"), bbox_inches="tight")
+
+df["change_score"] = change
+df["disruption_score"] = disruption
+df["diversification_score"] = diversification
+df["diversification_stirling_score"] = y_stirling-x_stirling
+df["entered_score"] = (entered>0).astype(int)
+df["exited_score"] = (exited>0).astype(int)
+
+df["origin"] = np.argmax(x, axis=1)
+df["target"] = np.argmax(y, axis=1)
+
+df["origin_value"] = x.max(axis=1)
+df["target_value"] = y.max(axis=1)
+
+df["origin_final_value"] = np.array(y[a,df.loc[a, "origin"]] for a in range(x.shape[0]))
+df["target_initial_value"] = np.array(x[a,df.loc[a, "target"]] for a in range(x.shape[0]))
+
+df["origin_label"] = df["origin"].apply(lambda k: topics[k])
+df["target_label"] = df["target"].apply(lambda k: topics[k])
+
+df["origin_label"] = df.apply(lambda row: row["origin_label"] + (f" ({row['origin_value']:.2f})" if row["origin"]==row["target"] else f" ({row['origin_value']:.2f}$\\to${row['origin_final_value']:.2f})"), axis=1)
+df["target_label"] = df.apply(lambda row: row["target_label"] + (f" ({row['target_value']:.2f})" if row["origin"]==row["target"] else f" ({row['target_initial_value']:.2f}$\\to${row['target_value']:.2f})"), axis=1)
+
+df["social_entropy"] = np.exp(entropy(S,axis=1))
+df["intellectual_entropy"] = np.exp(entropy(expertise,axis=1))
+
+expertise_matrix = np.einsum("ki,kj->kij", expertise, expertise)
+social_expertise_matrix = np.einsum("ki,kj->kij", S_distrib, S_distrib)
+df["intellectual_stirling"] = 1-np.einsum("ij,kij->k", R, expertise_matrix)
+df["social_stirling"] = 1-np.einsum("ij,kij->k", R, social_expertise_matrix)
+
+
+stability = institution_stability()
+df = df.merge(stability, left_on="bai", right_index=True)
+
+age = pd.read_csv(opj(args.input, "outcomes.csv"))[["bai", "age"]].drop_duplicates()
+df = df.merge(age, left_on="bai", right_on="bai")
+
+df["primary_research_area"] = x.argmax(axis=1)
+
+df["social_diversity"] = df[f"social_{args.diversity}"].fillna(0)
+df["intellectual_diversity"] = df[f"intellectual_{args.diversity}"].fillna(0)
+
+df["res_social_diversity"] = df["social_diversity"]-LinearRegression().fit(df[["intellectual_diversity"]], df["social_diversity"]).predict(df[["intellectual_diversity"]])
+
+data = {
+    "N": len(df),
+    "K": x.shape[1],
+    "m": df[f"{args.metric}_score"],
+    # "soc_cap": np.log(1+S.sum(axis=1)),
+    "soc_cap": S.sum(axis=1) if args.power == "magnitude" else df["brokerage"].values,
+    "soc_div": df["social_diversity"],
+    "int_div": df["intellectual_diversity"],
+    "res_soc_div": df["res_social_diversity"],
+    "x": x,
+    "initial_div": np.exp(entropy(x, axis=1)),
+    "primary_research_area": df["primary_research_area"],
+    "stable": df["stable"].astype(float).values,
+    "age": df["age"].values
+}
+
+fig, ax = plt.subplots(figsize=[6.4, 3.2])
+ax.hist(change[df["primary_research_area"] != 4], bins=np.linspace(0,1,25), histtype="step", label=f"Others ($\\mu={change[df['primary_research_area'] != 4].mean():.2f}$)", density=True)
+ax.hist(change[df["primary_research_area"] == 4], bins=np.linspace(0,1,25), histtype="step", label=f"Collider physics ($\\mu={change[df['primary_research_area'] == 4].mean():.2f}$)", density=True)
+ax.set_xlabel(f"Change score $c_a = \\frac{{1}}{{2}}\\sum_k |y_{{ak}}-x_{{ak}}|$")
+ax.set_ylabel("\\# of scientists")
+ax.legend(loc='upper right', bbox_to_anchor=(1, 1.2))
+fig.savefig(opj(args.input, "change_score_collider_physics.eps"), bbox_inches="tight")
+
+fig, ax = plt.subplots(figsize=[6.4, 3.2])
+ax.hist(disruption[df["primary_research_area"] != 4], bins=np.linspace(0,1,25), histtype="step", label=f"Others ($\\mu={disruption[df['primary_research_area'] != 4].mean():.2f}$)", density=True)
+ax.hist(disruption[df["primary_research_area"] == 4], bins=np.linspace(0,1,25), histtype="step", label=f"Collider physics ($\\mu={disruption[df['primary_research_area'] == 4].mean():.2f}$)", density=True)
+ax.set_xlabel(f"Disruption score $d_a$")
+ax.set_ylabel("\\# of scientists")
+ax.legend(loc='upper right', bbox_to_anchor=(1, 1.2))
+fig.savefig(opj(args.input, "disruption_score_collider_physics.eps"), bbox_inches="tight")
+
+if not exists(opj(args.input, f"samples_{args.metric}_{args.diversity}_{args.power}.npz")):
+    model = CmdStanModel(
+        stan_file=f"code/{args.metric}.stan" if args.model=="" else f"code/{args.metric}_{args.model}_{args.power}.stan",
+    )
+
+    fit = model.sample(
+        data=data,
+        chains=4,
+        iter_sampling=10000,
+        iter_warmup=1000,
+        show_console=True
+    )
+
+    vars = fit.stan_variables()
+    samples = {}
+    for (k, v) in vars.items():
+        samples[k] = v
+
+    np.savez_compressed(opj(args.input, f"samples_{args.metric}_{args.diversity}_{args.power}.npz"), **samples)
+
+
+samples = np.load(opj(args.input, f"samples_{args.metric}_{args.diversity}_{args.power}.npz"))
+
+labels = [
+    "Intellectual capital (diversity)",
+    "Social capital (diversity)",
+    "Social capital (power)",
+    "Stable affiliation",
+]
+labels = [f"\\textbf{{{label}}}" for label in labels]
+
+labels += topics
+
+names = [
+    "beta_int_div", "beta_soc_div", "beta_soc_cap", "beta_stable"
+]
+
+if args.metric not in ["entered", "exited"]:
+    mu = np.array([samples[name].mean() for name in names] + [(samples["beta_x"][:,i]*samples["tau"]).mean() for i in range(x.shape[1])])
+    low = np.array([np.quantile(samples[name], q=0.05/2) for name in names] + [np.quantile(samples["beta_x"][:,i]*samples["tau"], q=0.05/2) for i in range(x.shape[1])])
+    up = np.array([np.quantile(samples[name], q=1-0.05/2) for name in names] + [np.quantile(samples["beta_x"][:,i]*samples["tau"], q=1-0.05/2) for i in range(x.shape[1])])
+    sig = up*low>0
+
+    prob = np.array([(samples[name]*np.sign(samples[name].mean())<0).mean() for name in names] + [((samples["beta_x"][:,i]*np.sign(samples["beta_x"][:,i].mean()))<0).mean() for i in range(x.shape[1])])
+
+    keep = sig | (np.arange(len(sig))<len(names))
+    mu = mu[keep]
+    low = low[keep]
+    up = up[keep]
+    prob = prob[keep]
+
+    sign = ["<" if _mu>0 else ">" for i, _mu in enumerate(mu)]
+    labels = [label for i, label in enumerate(labels) if keep[i]]
+    n_vars = len(labels)
+
+
+    # effect of capital and controls
+    fig, ax = plt.subplots(figsize=[6.4, 0.4*(1+n_vars)])
+
+    ax.scatter(mu, np.arange(len(labels))[::-1])
+    ax.errorbar(mu, np.arange(len(labels))[::-1], xerr=(mu-low,up-mu), ls="none", capsize=4, elinewidth=1)
+    ax.set_yticks(np.arange(len(labels))[::-1], labels)
+    for i, p in enumerate(prob):
+        if p>1e-4 and np.abs(p-0.5)>0.4:
+            ax.text(
+                -0.02 if mu[i]>0 else 0.02,
+                np.arange(len(labels))[::-1][i],
+                f"\\scriptsize $\\mu(\\beta)={mu[i]:.2g}, P(\\beta{sign[i]}0)={p:.2g}$",
+                ha="right" if mu[i]>0 else "left",
+                va="center"
+            )
+        elif p<0.05/2 or p>1-0.05/2:
+            ax.text(
+                -0.02 if mu[i]>0 else 0.02,
+                np.arange(len(labels))[::-1][i],
+                f"\\scriptsize $\\mu(\\beta)={mu[i]:.2g}$",
+                ha="right" if mu[i]>0 else "left",
+                va="center"
+            )
+
+    ax.set_xlabel(f"Effect on {args.metric}")
+    ax.axvline(0, color="black")
+    fig.savefig(opj(args.input, f"{args.metric}_score_effects_{args.diversity}_{args.power}.eps"), bbox_inches="tight")
+
+    # average change score per research area
+    ratio = args.metric != "diversification"
+    labels = topics
+    if ratio:
+        mu = np.array([(samples["mu_x"][:,i]/samples["mu_pop"]).mean() for i in range(x.shape[1])])
+        low = np.array([np.quantile(samples["mu_x"][:,i]/samples["mu_pop"], q=0.05/2) for i in range(x.shape[1])])
+        up = np.array([np.quantile(samples["mu_x"][:,i]/samples["mu_pop"], q=1-0.05/2) for i in range(x.shape[1])])
+        sig = (up-1)*(low-1)>0
+    else:
+        mu = np.array([(samples["mu_x"][:,i]-samples["mu_pop"]).mean() for i in range(x.shape[1])])
+        low = np.array([np.quantile(samples["mu_x"][:,i]-samples["mu_pop"], q=0.05/2) for i in range(x.shape[1])])
+        up = np.array([np.quantile(samples["mu_x"][:,i]-samples["mu_pop"], q=1-0.05/2) for i in range(x.shape[1])])
+        sig = (up)*(low)>0
+
+    keep = sig
+    mu = mu[keep]
+    low = low[keep]
+    up = up[keep]
+    labels = [label for i, label in enumerate(labels) if keep[i]]
+
+    fig, ax = plt.subplots(figsize=[6.4, 3.2])
+    ax.scatter(mu, np.arange(len(labels))[::-1])
+    ax.errorbar(mu, np.arange(len(labels))[::-1], xerr=(mu-low,up-mu), ls="none", capsize=4, elinewidth=1)
+    ax.set_yticks(np.arange(len(labels))[::-1], labels)
+
+    fig, ax = plt.subplots(figsize=[6.4, 3.2])
+
+    df["m_ratio"] = df[f"{args.metric}_score"]/df[f"{args.metric}_score"].mean()
+    research_areas = df.groupby("primary_research_area").agg(
+        mu=("m_ratio", "mean"),
+        low=("m_ratio", lambda x: np.quantile(x, q=0.05/2)),
+        up=("m_ratio", lambda x: np.quantile(x, q=1-0.05/2)),
+        label=("origin_label", lambda x: x.iloc[0])
+    ).reset_index()
+
+    ax.scatter(research_areas["mu"], research_areas.index)
+    ax.errorbar(research_areas["mu"], research_areas.index, xerr=(research_areas["mu"]-research_areas["low"], research_areas["up"]-research_areas["mu"]), ls="none", capsize=4, elinewidth=1)
+    ax.set_yticks(research_areas.index, research_areas["label"])
+
+    ax.set_xlabel(f"Ratio to average {args.metric} score" if ratio else f"Difference with average {args.metric} score")
+    ax.axvline(1 if ratio else 0, color="black")
+    fig.savefig(opj(args.input, f"{args.metric}_research_area.eps"), bbox_inches="tight")
+else:
+
+    labels = [
+        "Intellectual capital (diversity)",
+        "Social capital (diversity)",
+        "Social capital (power)",
+        "Stable affiliation",
+    ]
+
+    if not args.compact:
+        labels = [f"\\textbf{{{label}}}" for label in labels]
+
+    labels += topics
+
+    samples = [
+        np.load(opj(args.input, f"samples_entered_{args.diversity}_{args.power}.npz")),
+        np.load(opj(args.input, f"samples_exited_{args.diversity}_{args.power}.npz"))
+    ]
+
+    mu = [None, None]
+    low = [None, None]
+    up = [None, None]
+    sig = [None, None]
+    prob = [None, None]
+    for i in range(2):
+        mu[i] = np.array([samples[i][name].mean() for name in names] + [(samples[i]["beta_x"][:,j]*samples[i]["tau"]).mean() for j in range(x.shape[1])])
+        low[i] = np.array([np.quantile(samples[i][name], q=0.05/2) for name in names] + [np.quantile(samples[i]["beta_x"][:,j]*samples[i]["tau"], q=0.05/2) for j in range(x.shape[1])])
+        up[i] = np.array([np.quantile(samples[i][name], q=1-0.05/2) for name in names] + [np.quantile(samples[i]["beta_x"][:,j]*samples[i]["tau"], q=1-0.05/2) for j in range(x.shape[1])])
+        sig[i] = up[i]*low[i]>0
+        prob[i] = np.array([(samples[i][name]*np.sign(samples[i][name].mean())<0).mean() for name in names] + [((samples[i]["beta_x"][:,j]*np.sign(samples[i]["beta_x"][:,j].mean()))<0).mean() for j in range(x.shape[1])])
+
+    if args.compact:
+        keep = (np.arange(len(sig[0]))<len(names))
+    else:
+        keep = sig[0] | sig[1] | (np.arange(len(sig[0]))<len(names))
+
+    for i in range(2):
+        mu[i] = mu[i][keep]
+        low[i] = low[i][keep]
+        up[i] = up[i][keep]
+        prob[i] = prob[i][keep]
+
+    sign = [["<" if _mu>0 else ">" for j, _mu in enumerate(mu[i])] for i in range(2)]
+    labels = [label for i, label in enumerate(labels) if keep[i]]
+    n_vars = len(labels)
+
+    if args.compact:
+        labels = [
+            '\n'.join(map(lambda x: f"\\textbf{{{x}}}", wrap(label, width=15))) if i < 4
+            else
+            '\n'.join(wrap(label, width=15))
+            for i, label in enumerate(labels)
+        ]
+        print(labels)
+
+
+    # effect of capital and controls
+    fig, ax = plt.subplots(figsize=[4.8 if args.compact else 6.4, 0.52*(1+n_vars)])
+    colors = ['#377eb8', '#ff7f00']
+    legend = ["entered new research area", "exited research area"]
+
+    if args.compact:
+        ax.set_xlim(-0.9, 1.25)
+
+    for j in range(2):
+        dy = -0.125 if j else +0.125
+        ax.scatter(mu[j], np.arange(len(labels))[::-1]+dy, color=colors[j])
+        ax.errorbar(mu[j], np.arange(len(labels))[::-1]+dy, xerr=(mu[j]-low[j],up[j]-mu[j]), ls="none", capsize=4, elinewidth=1, color=colors[j], label=legend[j])
+        for i, p in enumerate(prob[j]):
+            significant = p<0.05/2
+            if p>1e-4 and np.abs(p-0.5)>0.4 and significant:
+                ax.text(
+                    -0.02 if mu[j][i]>0 else 0.02,
+                    np.arange(len(labels))[::-1][i]+dy,
+                    f"\\scriptsize $\\mu(\\beta)={mu[j][i]:.2g},P(\\beta{sign[j][i]}0)={p:.2g}$",
+                    ha="right" if mu[j][i]>0 else "left",
+                    va="center"
+                )
+            elif p>1e-4 and np.abs(p-0.5)>0.4 and (not significant):
+                ax.text(
+                    -0.02 if mu[j][i]>0 else 0.02,
+                    np.arange(len(labels))[::-1][i]+dy,
+                    f"\\scriptsize $P(\\beta{sign[j][i]}0)={p:.2g}$",
+                    ha="right" if mu[j][i]>0 else "left",
+                    va="center"
+                )
+            elif significant:
+                ax.text(
+                    -0.02 if mu[j][i]>0 else 0.02,
+                    np.arange(len(labels))[::-1][i]+dy,
+                    f"\\scriptsize $\\mu(\\beta)={mu[j][i]:.2g}$",
+                    ha="right" if mu[j][i]>0 else "left",
+                    va="center"
+                )
+
+    ax.set_yticks(np.arange(len(labels))[::-1], labels)
+    ax.set_xlabel(f"Effect size (log odds ratio)")
+    ax.axvline(0, color="black")
+    if args.compact:
+        ax.legend(loc='upper right', bbox_to_anchor=(1, 1.3))
+    else:
+        ax.legend(loc='upper right', bbox_to_anchor=(1, 1.2))
+    fig.savefig(opj(args.input, f"{args.metric}_score_effects_{args.diversity}_{args.power}{'_compact' if args.compact else ''}.eps"), bbox_inches="tight")    
+
+table = df[["bai", "stable", f"{args.metric}_score", "intellectual_entropy", "social_entropy", "origin_label", "target_label"]].sort_values(f"{args.metric}_score", ascending=False)
+table.to_csv(opj(args.input, f"{args.metric}_scores.csv"))
+
+table["bai"] = table["bai"].str.replace(".1", "")
+table["bai"] = table["bai"].str.replace(r"^([A-Z])\.", r"\1.~")
+table["bai"] = table["bai"].str.replace(r"\.\~([A-Z])\.", r".~\1.~")
+table["bai"] = table["bai"].str.replace(r"([a-zA-Z]{2,})\.", r"\1 ")
+table["bai"] = table.apply(lambda r: r["bai"] if not r["stable"] else f"{r['bai']} ($\\ast$)", axis=1)
+
+table["target_label"] += "EOL"
+
+latex = table.head(20).to_latex(
+    columns=["bai", f"{args.metric}_score", "intellectual_entropy", "social_entropy", "origin_label", "target_label"],
+    header=["Physicist", "$c_a$", "$D(\\bm{I_a})$", "$D(\\bm{S_a})$", "Previous main area", "Current main area"],
+    index=False,    
+    multirow=True,
+    multicolumn=True,
+    column_format='p{0.15\\textwidth}|c|c|c|b{0.25\\textwidth}|b{0.25\\textwidth}',
+    escape=False,
+    float_format=lambda x: f"{x:.2f}",
+    caption="Physicists with the highest change scores $c_a$. $D(\\bm{I_a})$ and $D(\\bm{S_a})$ measure the diversity of intellectual and social capital. Numbers in parentheses indicate the share of attention dedicated to each research area during each time-period. Asterisks ($\\ast$) indicate physicists with a permanent position.",
+    label=f"table:top_{args.metric}",
+    position="H"
+)
+
+latex = latex.replace('EOL \\\\\n', '\\\\ \\hline\n')
+
+with open(opj(args.input, f"top_{args.metric}.tex"), "w+") as fp:
+    fp.write(latex)
+
+
+latex = table.sort_values(f"{args.metric}_score", ascending=True).head(20).to_latex(
+    columns=["bai", f"{args.metric}_score", "intellectual_entropy", "social_entropy", "origin_label", "target_label"],
+    header=["Physicist", "$c_a$", "$D(\\bm{I_a})$", "$D(\\bm{S_a})$", "Previous main area", "Current main area"],
+    index=False,    
+    multirow=True,
+    multicolumn=True,
+    column_format='p{0.15\\textwidth}|c|c|c|b{0.25\\textwidth}|b{0.25\\textwidth}',
+    escape=False,
+    float_format=lambda x: f"{x:.2f}",
+    caption="Physicists with the lowest change scores $c_a$. $D(\\bm{I_a})$ and $D(\\bm{S_a})$ measure the diversity of intellectual and social capital. Numbers in parentheses indicate the share of attention dedicated to each research area. Asterisks ($\\ast$) indicate physicists with a permanent position.",
+    label=f"table:low_{args.metric}",
+    position="H"
+)
+
+latex = latex.replace('EOL \\\\\n', '\\\\ \\hline\n')
+
+with open(opj(args.input, f"low_{args.metric}.tex"), "w+") as fp:
+    fp.write(latex)
+

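The change and disruption scores computed above are, respectively, a total-variation distance and an earth-mover distance between the start (x) and end (y) topic distributions. A toy sketch (separate from the committed script), assuming numpy and the POT package are installed; the topic shares and relatedness matrix R below are invented:

    import numpy as np
    import ot  # POT: Python Optimal Transport

    # invented start/end topic shares for one author, over 3 topics
    x = np.array([0.7, 0.2, 0.1])
    y = np.array([0.2, 0.7, 0.1])

    # change score: total variation distance, as in comparative_analysis.py
    change = np.abs(y - x).sum() / 2       # 0.5

    # disruption score: earth-mover distance with cost 1 - R (topic relatedness)
    R = np.array([[1.0, 0.8, 0.1],
                  [0.8, 1.0, 0.1],
                  [0.1, 0.1, 1.0]])
    disruption = ot.emd2(x, y, 1 - R)      # small here: mass moves between related topics
    print(change, disruption)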
+ 97 - 0
code/disruption.stan

@@ -0,0 +1,97 @@
+functions {
+    vector z_scale(vector x) {
+        return (x-mean(x))/sd(x);
+    }
+}
+
+data {
+    int<lower=1> N;
+    int<lower=1> K;
+    vector<lower=0>[N] soc_cap;
+    vector<lower=0>[N] soc_div;
+    vector<lower=0>[N] int_div;
+    //vector<lower=0>[N] age;
+    vector<lower=0>[N] m;
+    matrix<lower=0,upper=1>[N,K] x;
+    vector[N] stable;
+    array [N] int<lower=0,upper=K-1> primary_research_area;
+}
+
+transformed data {
+    vector[N] z_m = z_scale(m);
+    vector[N] z_soc_cap = z_scale(soc_cap);
+    vector[N] z_soc_div = z_scale(exp(soc_div));
+    vector[N] z_int_div = z_scale(exp(int_div));
+}
+
+parameters {
+    real beta_soc_cap;
+    real beta_soc_div;
+    real beta_int_div;
+    real beta_stable;
+    vector[K] beta_x;
+
+    real mu;
+    real<lower=0> tau;
+    real<lower=0> sigma;
+
+    real lambda_div;
+    real mu_div;
+    real<lower=0> sigma_div;
+
+    vector<lower=0,upper=1>[K] mu_x;
+    vector<lower=1>[K] eta;
+    real<lower=0,upper=1> mu_pop;
+    real<lower=1> eta_pop;
+}
+
+model {
+    vector[N] beta_research_area;
+    for (k in 1:N) {
+        beta_research_area[k] = beta_x[primary_research_area[k]+1];
+    }
+    vector[N] res_soc_div = z_scale(soc_div - (lambda_div*z_int_div + mu_div));
+
+    beta_soc_cap ~ normal(0, 1);
+    beta_soc_div ~ normal(0, 1);
+    beta_int_div ~ normal(0, 1);
+    beta_x ~ normal(0, tau);
+    beta_stable ~ normal(0, 1);
+
+    z_soc_div ~ normal(lambda_div*z_int_div + mu_div, sigma_div);
+    lambda_div ~ normal(0, 1);
+    mu_div ~ normal(0, 1);
+    sigma_div ~ exponential(1);
+
+    mu ~ normal(0, 1);
+    tau ~ exponential(1);
+    sigma ~ exponential(1);
+
+    z_m ~ normal(beta_soc_cap*z_soc_cap + beta_soc_div*res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu, sigma);
+
+    eta ~ pareto(1, 1.5);
+    mu_x ~ uniform(0, 1);
+    eta_pop ~ pareto(1, 1.5);
+    mu_pop ~ uniform(0, 1);
+    for (k in 1:N) {
+       m[k] ~ beta_proportion(mu_x[primary_research_area[k]+1], eta[primary_research_area[k]+1]);
+    }
+    m ~ beta_proportion(mu_pop, eta_pop);
+}
+
+generated quantities {
+    real R2 = 0;
+    {
+        vector[N] beta_research_area;
+        for (k in 1:N) {
+            beta_research_area[k] = beta_x[primary_research_area[k]+1];
+        }
+        vector[N] res_soc_div = z_scale(soc_div - (lambda_div*z_int_div + mu_div));
+        vector[N] pred = beta_soc_cap*z_soc_cap + beta_soc_div*res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu;
+
+        R2 = mean(square(z_m-pred))/variance(z_m);
+        R2 = 1-R2;
+    }
+    
+}
+

+ 160 - 0
code/ei_cov_softmax_control_nu.stan

@@ -0,0 +1,160 @@
+functions {
+    real partial_sum_lpdf(array[] vector X,
+        int start, int end,
+        int R,
+        int C,
+        array[,] int indices,
+        array[,] int NR,
+        array[,] int NC,
+        array[] vector cov,
+        array[] vector expertise,
+        matrix mu, vector L_sigma,
+        array[] vector delta,
+        array[] vector gamma,
+        array[] vector beta_vec
+        ) {
+            vector[C] theta;
+            matrix[C,R] beta;
+            vector [C] tmp = rep_vector(0, C);
+
+            real lpmf = 0;
+
+            for (i in start:end) {
+                for(r in 1:R) {
+                    if (NR[i,r] == 0) {
+                        beta[:,r] = rep_vector(0, C); 
+                    }
+                    else {
+                        lpmf += normal_lpdf(beta_vec[indices[i,r]] | 0, 1);
+                        tmp = mu[r,:]';//'
+                        tmp[1:C-1] += beta_vec[indices[i,r]] .* L_sigma;
+                        beta[:,r] = softmax(tmp + delta[r] .* cov[i] + gamma[r] .* expertise[i]); 
+                    }
+                }
+
+                theta = beta*X[i-start+1,:];
+                lpmf += multinomial_lpmf(NC[i,:] | theta);
+            }
+            return lpmf;
+    }
+}
+
+data {
+    int<lower=1> n_units;
+    int<lower=2> R;
+    int<lower=2> C;
+
+    array[n_units,R] int NR;
+    array[n_units,C] int NC;
+    array[n_units] vector[C] cov;
+    array[n_units] vector[C] expertise;
+    matrix<lower=0,upper=1>[R,C] nu;
+
+    int<lower=1> threads;
+}
+
+transformed data {
+    array [n_units] int population;
+    array[n_units] vector<lower=0,upper=1>[R] X;
+    array[n_units,R] int indices;
+    int largest_index = 1;
+    
+    for (i in 1:n_units) {
+        population[i] = sum(NR[i,:]);
+        for (r in 1:R) {
+            X[i,r] = (NR[i,r]*1.0)/(population[i]*1.0);
+
+            if (NR[i,r] > 0) {
+                indices[i,r] = largest_index;
+                largest_index += 1;
+            }
+            else {
+                indices[i,r] = 0;
+            }
+        }
+    }
+
+    print("largest index:", largest_index);
+    print("RxN: ", R*n_units);
+}
+
+parameters {
+    matrix[R, C-1] mu_;
+    array[R] vector[C] delta;
+    array[R] vector[C] gamma;
+    array [largest_index] vector[C-1] beta_vec;
+
+    real delta_0;
+    real delta_nu;
+    //real gamma_nu;
+    real mu_nu;
+
+    //cholesky_factor_corr[C-1] L_Omega;
+    vector<lower=0>[C-1] L_sigma;
+}
+
+model {
+    matrix[R, C] mu;
+    for (r in 1:R) {
+        mu[r,:C-1] = mu_[r];
+        mu[r,C] = 0;
+        mu[r,:] += mu_nu*nu[r,:];
+    }
+
+    //matrix[C-1, C-1] L_Sigma = diag_pre_multiply(L_sigma, L_Omega);
+    //L_Omega ~ lkj_corr_cholesky(10);
+    L_sigma ~ exponential(1);
+    
+    for (r in 1:R) {
+        mu_[r] ~ normal(0, 1);
+        delta[r] ~ normal(delta_0+delta_nu*nu[r,:], 1);
+        gamma[r] ~ normal(0, 1);
+    }
+
+    delta_0 ~ normal(0, 1);
+    delta_nu ~ normal(0, 1);
+    mu_nu ~ normal(0, 1);
+
+    target += reduce_sum(
+        partial_sum_lpdf, X, n_units%/%(threads*40),
+        R, C, indices, NR, NC, cov, expertise,
+        mu, L_sigma,
+        delta, gamma,
+        beta_vec
+    );
+}
+
+generated quantities {
+    array [n_units,R] vector[C] beta;
+    array [R] vector[C] counts;
+
+    for (r in 1:R) {
+        counts[r] = rep_vector(0, C);
+    }
+
+    vector [C] tmp = rep_vector(0, C);
+
+    matrix[R, C] mu;
+    for (r in 1:R) {
+        mu[r,:C-1] = mu_[r];
+        mu[r,C] = 0;
+        mu[r,:] += mu_nu*nu[r,:];
+    }
+
+    
+    for (r in 1:R) {
+        for (i in 1:n_units) {
+            if (NR[i,r] == 0) {
+                beta[i,r] = rep_vector(0, C);
+            }
+            else {
+                tmp = mu[r,:]';//'
+                tmp[1:C-1] += beta_vec[indices[i,r]] .* L_sigma;
+                beta[i,r] = softmax(tmp + delta[r] .* cov[i] + gamma[r] .* expertise[i]);
+                counts[r] += NR[i,r]*beta[i,r,:];
+            }
+            
+        }
+        counts[r] = counts[r]/sum(population);
+    }
+}

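The generated-quantities block reconstructs each unit's row-to-column transition matrix through a softmax link on a baseline plus covariate effects. A numpy sketch of that link (separate from the committed model), with invented sizes and parameter values:

    import numpy as np
    from scipy.special import softmax

    R, C = 3, 3                            # research areas before / after (invented sizes)
    mu = np.zeros((R, C))                  # baseline log-preferences (all zeros here; the Stan model pins the last column to 0)
    delta = np.random.normal(size=(R, C))  # effect of co-author resources
    gamma = np.random.normal(size=(R, C))  # effect of own expertise
    cov = np.random.dirichlet(np.ones(C))  # one author's pooled resources
    expertise = np.random.dirichlet(np.ones(C))
    x = np.array([0.6, 0.3, 0.1])          # start distribution over the R areas

    # beta[r] = softmax(mu[r] + delta[r]*cov + gamma[r]*expertise): transition probabilities out of area r
    beta = softmax(mu + delta * cov + gamma * expertise, axis=1)
    theta = x @ beta                       # implied end-of-period distribution
    print(theta, theta.sum())              # sums to 1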
+ 277 - 0
code/ei_map.py

@@ -0,0 +1,277 @@
+from socketserver import ThreadingUnixStreamServer
+import pandas as pd
+import numpy as np
+
+import torch
+from sklearn.model_selection import train_test_split, KFold
+
+from scipy.special import softmax
+
+import argparse
+import pickle
+
+from os.path import join as opj, basename
+
+import seaborn as sns
+from matplotlib import pyplot as plt
+import matplotlib
+matplotlib.use("pgf")
+matplotlib.rcParams.update(
+    {
+        "pgf.texsystem": "xelatex",
+        "font.family": "serif",
+        "font.serif": "Times New Roman",
+        "text.usetex": True,
+        "pgf.rcfonts": False,
+    }
+)
+plt.rcParams["text.latex.preamble"] = "\n".join([
+        r"\usepackage{amsmath}",
+        r"\usepackage{amssymb}",
+])
+
+from multiprocessing import Pool
+from functools import partial
+
+
+class TrajectoryModel(torch.nn.Module):
+    def __init__(self, N, R, C, nu):
+        super().__init__()
+
+        self.N = N
+        self.R = R
+        self.C = C
+        self.nu = nu
+
+        self.dtype = torch.float
+
+        self.init_weights()
+
+        torch.autograd.set_detect_anomaly(True)
+
+        if torch.cuda.is_available():
+            self.device = torch.device("cuda")
+            self.to(self.device)
+        else:
+            print("GPU is not available, using CPU instead")
+
+    def init_weights(self):
+        self.beta = torch.nn.Parameter(
+            torch.zeros((self.N, self.R, self.C))
+        )
+        self.mu = torch.nn.Parameter(torch.zeros((self.R, self.C-1)))
+        self.gamma = torch.nn.Parameter(torch.zeros((self.R, self.C)))
+        self.delta = torch.nn.Parameter(torch.zeros((self.R, self.C)))
+        self.eps = torch.nn.Parameter(
+            torch.zeros((self.N, self.R, self.C-1))
+        )
+        self.sigma = torch.nn.Parameter(torch.zeros((self.R, self.C-1)))
+        self.mu_nu = torch.nn.Parameter(torch.zeros(1))
+
+    def train(self, train, validation, epoch=50, autostop=False, printouts=1000):
+        optimizer = torch.optim.Adam(self.parameters(), lr=0.01)
+        # clipping_value = 1e6
+
+        epochs = []
+        train_loss = []
+        validation_loss = []
+        reference_loss = []
+
+        for t in range(epoch):
+            # print("mu:", self.mu)
+            
+            loss = self.loss(train)
+            print(t, loss.item())
+
+            optimizer.zero_grad()
+            loss.backward()
+
+            optimizer.step()
+
+            if (t % 2) == 0:
+                # beta = (torch.einsum("ld,lij->dij", self.M, self.lambd)).detach().numpy()
+                y_pred_train = self.predict(train).detach().numpy()
+                y_pred_validation = self.predict(validation).detach().numpy()
+
+                epochs.append(t)
+                train_loss.append((np.abs(train["y"]-y_pred_train).sum(axis=1)/2).mean())
+                validation_loss.append((np.abs(validation["y"]-y_pred_validation).sum(axis=1)/2).mean())
+                reference_loss.append((np.abs(validation["y"]-validation["x"]).sum(axis=1)/2).mean())
+
+                if (t % printouts) == 0:
+                    fig, ax = plt.subplots(figsize=[6.4*0.75, 4.8*0.75])
+                    ax.plot(np.array(epochs), train_loss, label="$d_{\\mathrm{TV}}(\\vec{y}_a,\\vec{y}_a^{\\mathrm{pred}})$ -- training set")
+                    ax.plot(np.array(epochs), validation_loss, label="$d_{\\mathrm{TV}}(\\vec{y}_a,\\vec{y}_a^{\\mathrm{pred}})$ -- test set")
+                    ax.plot(np.array(epochs), reference_loss, label="$d_{\\mathrm{TV}}(\\vec{y}_a,\\vec{x}_a)$ -- test set")
+                    ax.set_xlabel("Epochs")
+                    ax.set_ylabel("Performance (total variation distance)")
+                    ax.legend(loc='upper right', bbox_to_anchor=(1, 1.2))
+                    ax.set_ylim(0.3,0.6)
+                    fig.savefig(f"status_{basename(args.input)}.eps", bbox_inches="tight")
+
+            if autostop and len(validation_loss)>2 and validation_loss[-1]>validation_loss[-2] and validation_loss[-2]>validation_loss[-3]:
+                break
+
+        return train_loss, validation_loss, reference_loss
+
+    def predict(self, data, eps=None):
+        N = data["N"]
+
+        mu = torch.zeros((self.R, self.C))
+        mu[:,:-1] = self.mu
+        mu += self.nu*self.mu_nu
+
+        s = torch.zeros((N, self.R, self.C))
+        s = s+torch.einsum("ij,aj->aij", self.gamma, data["expertise"])
+        s = s+torch.einsum("ij,aj->aij", self.delta, data["cov"])
+        s = s+mu
+
+        if eps is not None:
+            eps_ = torch.zeros((N, self.R, self.C))
+            eps_[:,:,:-1] = eps
+            s += eps_
+
+        b = torch.softmax(s, dim=2)
+        p = torch.einsum("aij,ai->aj", b, data["x"])
+        return p
+
+    def loss(self, data):
+        Y = data["Y"]
+        
+        loss = 0
+
+        p = self.predict(data, self.eps)
+        
+        for a in range(p.shape[0]):
+            multi = torch.distributions.multinomial.Multinomial(
+                total_count=Y[a,:].sum().max().item(),
+                probs=p[a,:]
+            )
+            loss -= multi.log_prob(Y[a,:]).sum()
+
+        print("evidence loss: ", loss/data["N"])
+
+        eps_prior = torch.distributions.normal.Normal(0, self.sigma.exp())
+        sigma_prior = torch.distributions.exponential.Exponential(1)
+        normal_prior = torch.distributions.normal.Normal(0, 1)
+
+        priors_loss = 0
+        priors_loss -= eps_prior.log_prob(self.eps).sum()
+        priors_loss -= sigma_prior.log_prob(self.sigma.exp()).sum()
+        priors_loss -= normal_prior.log_prob(self.mu).sum()
+        priors_loss -= normal_prior.log_prob(self.delta).sum()
+        priors_loss -= normal_prior.log_prob(self.gamma).sum()
+        priors_loss -= normal_prior.log_prob(self.mu_nu).sum()
+        
+        print("priors loss:", priors_loss/data["N"])
+
+        loss += priors_loss
+        loss /= data["N"]
+
+        return loss
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--input")
+parser.add_argument("--folds", default=0, type=int)
+
+args = parser.parse_args()
+
+n_topics = len(pd.read_csv(opj(args.input, "topics.csv")))
+
+df = pd.read_csv(opj(args.input, "aggregate.csv"))
+df = df[df[[f"start_{k+1}" for k in range(n_topics)]].sum(axis=1) >= 100]
+
+resources = pd.read_parquet(opj(args.input, "pooled_resources.parquet"))
+df = df.merge(resources, left_on="bai", right_on="bai")
+
+data = {
+    "NR": np.stack(df[[f"start_{k+1}" for k in range(n_topics)]].values).astype(int),
+    "NC": np.stack(df[[f"end_{k+1}" for k in range(n_topics)]].values).astype(int),
+    "expertise": np.stack(df[[f"expertise_{k+1}" for k in range(n_topics)]].fillna(0).values),
+}
+
+data["cov"] = np.stack(df["pooled_resources"])
+
+junk = np.sum(data["NR"] + data["NC"], axis=0) == 0
+
+for col in ["NR", "NC", "cov", "expertise"]:
+    data[col] = data[col][:, ~junk]
+
+R = n_topics-junk.sum()
+C = n_topics-junk.sum()
+
+data["cov"] = np.nan_to_num(data["cov"])# / np.maximum(data["cov"].sum(axis=1)[:, np.newaxis], 1)
+data["expertise"] = np.nan_to_num(data["expertise"])# / np.maximum(data["cov"].sum(axis=1)[:, np.newaxis], 1)
+
+
+expertise = data["expertise"]
+nu = np.array([
+    [((expertise[:,i]>expertise[:,i].mean())&(expertise[:,j]>expertise[:,j].mean())).mean()/(expertise[:,i]>expertise[:,i].mean()).mean() for j in range(R)]
+    for i in range(R)
+])
+
+data["Y"] = data["NC"]
+data["x"] = data["NR"]/data["NR"].sum(axis=1)[:,np.newaxis]
+data["y"] = data["NC"]/data["NC"].sum(axis=1)[:,np.newaxis]
+
+N = data["x"].shape[0]
+
+def split_train_validation(data, test_size):
+    train_ind, test_ind = train_test_split(np.arange(N), test_size=test_size)
+
+    train, validation = {}, {}
+    for k in data:
+        train[k] = torch.from_numpy(data[k][train_ind])
+        validation[k] = torch.from_numpy(data[k][test_ind])
+
+    return train, validation
+
+def folds(data, folds):
+    f = []
+    kf = KFold(n_splits=folds, shuffle=True)
+
+    for i, (train_ind, test_ind) in enumerate(kf.split(np.arange(N))):
+        fold_train, fold_test = {}, {}
+        for k in data:
+            fold_train[k] = torch.from_numpy(data[k][train_ind])
+            fold_test[k] = torch.from_numpy(data[k][test_ind])
+
+        f.append((fold_train, fold_test))
+
+    return f
+
+def run_model(data):
+    data[0]["N"] = data[0]["x"].shape[0]
+    data[1]["N"] = data[1]["x"].shape[0]
+
+    mdl = TrajectoryModel(data[0]["N"], R, C, torch.from_numpy(nu))
+    train_loss, validation_loss, reference_loss = mdl.train(
+        data[0], data[1],
+        epoch=1000,
+        autostop=True,
+        printouts=50
+    )
+    scores = [
+        train_loss[-1].detach().numpy(), validation_loss[-1].detach().numpy(), reference_loss[-1].detach().numpy()
+    ]
+    print(scores)
+    return scores
+
+if args.folds > 0:
+    f = folds(data, args.folds)
+
+    with Pool(processes=args.folds) as pool:
+        scores = pool.map(run_model, f)
+
+    scores = np.array(scores)
+
+    np.save(opj(args.input, f"scores.npy"), scores)
+
+else:
+    train, validation = split_train_validation(data, test_size=0.2)
+    train["N"] = train["x"].shape[0]
+    validation["N"] = validation["x"].shape[0]
+
+    mdl = TrajectoryModel(train["N"], R, C, torch.from_numpy(nu))
+    mdl.train(train, validation, epoch=800)
+

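The evidence term in TrajectoryModel.loss is a multinomial log-likelihood of the observed end-of-period counts under the predicted shares. A minimal torch sketch of that term (separate from the committed script), with invented counts and probabilities:

    import torch

    # invented: predicted end-of-period topic shares and observed paper counts for one author
    p = torch.tensor([0.5, 0.3, 0.2])
    counts = torch.tensor([6., 3., 1.])

    # multinomial negative log-likelihood, as used per author in TrajectoryModel.loss
    multi = torch.distributions.Multinomial(total_count=int(counts.sum()), probs=p)
    nll = -multi.log_prob(counts)
    print(nll)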
+ 89 - 0
code/entered.stan

@@ -0,0 +1,89 @@
+functions {
+    vector z_scale(vector x) {
+        return (x-mean(x))/sd(x);
+    }
+}
+
+data {
+    int<lower=1> N;
+    int<lower=1> K;
+    vector<lower=0>[N] soc_cap;
+    vector<lower=0>[N] soc_div;
+    vector<lower=0>[N] int_div;
+    vector[N] res_soc_div;
+    //vector<lower=0>[N] age;
+    array[N] int<lower=0,upper=1> m;
+    matrix<lower=0,upper=1>[N,K] x;
+    vector[N] stable;
+    array [N] int<lower=0,upper=K-1> primary_research_area;
+}
+
+transformed data {
+    //vector[N] z_m = z_scale(m);
+    vector[N] z_soc_cap = z_scale(soc_cap);
+    vector[N] z_soc_div = z_scale(soc_div);
+    vector[N] z_int_div = z_scale(int_div);
+    vector[N] z_res_soc_div = z_scale(res_soc_div);
+
+}
+
+parameters {
+    real beta_soc_cap;
+    real beta_soc_div;
+    real beta_int_div;
+    real beta_stable;
+    vector[K] beta_x;
+
+    real mu;
+    real<lower=0> tau;
+    real<lower=1> sigma;
+
+    // vector[K] mu_x;
+    // vector<lower=0>[K] sigma_x;
+    // real mu_pop;
+    // real<lower=0> sigma_pop;
+}
+
+model {
+    vector[N] beta_research_area;
+    for (k in 1:N) {
+        beta_research_area[k] = tau*beta_x[primary_research_area[k]+1];
+    }
+
+    beta_soc_cap ~ normal(0, 1);
+    beta_soc_div ~ normal(0, 1);
+    beta_int_div ~ normal(0, 1);
+    beta_x ~ double_exponential(0, 1);
+    beta_stable ~ normal(0, 1);
+
+    mu ~ normal(0, 1);
+    tau ~ exponential(1);
+    sigma ~ pareto(1,1.5);
+
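+    // Logistic regression of the binary outcome m on standardized social capital, (residualized)
+    // social diversity, intellectual diversity, the stability covariate, and research-area
+    // intercepts beta_x (Laplace prior, scaled by tau), plus a global intercept mu.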
+    m ~ bernoulli_logit(beta_soc_cap*z_soc_cap + beta_soc_div*z_res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu);
+
+    // m ~ beta(mu_pop, sigma_pop);
+    // for (k in 1:N) {
+    //    m[k] ~ normal(mu_x[primary_research_area[k]+1], sigma_x[primary_research_area[k]+1]);
+    // }
+    // mu_x ~ normal(0, 1);
+    // mu_pop ~ normal(0, 1);
+    // sigma_x ~ exponential(1);
+    // sigma_pop ~ exponential(1);
+}
+
+generated quantities {
+    // real R2 = 0;
+    // {
+    //     vector[N] beta_research_area;
+    //     for (k in 1:N) {
+    //         beta_research_area[k] = beta_x[primary_research_area[k]+1];
+    //     }
+    //     vector[N] pred = inv_logit(beta_soc_cap*z_soc_cap + beta_soc_div*z_res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu);
+
+    //     R2 = mean(square(z_m-pred));
+    //     R2 = 1-R2;
+    // }
+    
+}
+

+ 427 - 0
code/etm.py

@@ -0,0 +1,427 @@
+from AbstractSemantics.terms import TermExtractor
+from AbstractSemantics.embeddings import GensimWord2Vec
+from gensim.models import KeyedVectors
+
+import nltk
+
+import pandas as pd
+import numpy as np
+
+from scipy.sparse import csr_matrix
+
+from os.path import join as opj
+from os.path import exists
+
+import itertools
+from functools import partial
+from collections import defaultdict
+
+import re
+
+import multiprocessing as mp
+
+# from matplotlib import pyplot as plt
+
+import argparse
+import yaml
+import sys
+import pickle
+
+from gensim.models.callbacks import CallbackAny2Vec
+
+
+class MonitorCallback(CallbackAny2Vec):
+    def __init__(self, test_words):
+        self._test_words = test_words
+        self.epoch = 0
+
+    def on_epoch_end(self, model):
+        loss = model.get_latest_training_loss()
+        if self.epoch == 0:
+            print("Loss after epoch {}: {}".format(self.epoch, loss))
+        else:
+            print(
+                "Loss after epoch {}: {}".format(
+                    self.epoch, loss - self.loss_previous_step
+                )
+            )
+
+        self.epoch += 1
+        self.loss_previous_step = loss
+
+        for word in self._test_words:  # show wv logic changes
+            print(f"{word}: {model.wv.most_similar(word)}")
+
+
+def filter_ngrams(l, wl):
+    return [ngram for ngram in l if ngram in wl]
+
+
+def construct_bow(l, n):
+    items = list(set(l))
+    return np.array(items), np.array([l.count(i) for i in items])
+
+
+def ngram_inclusion(i, js):
+    return [
+        j.find(i) >= 0  # matching
+        and bool(re.search(f"(^|\_){re.escape(i)}($|\_)", j))
+        and (j.count("_") == i.count("_") + 1)
+        and (
+            (i.count("_") >= 1)
+            or bool(re.search(f"(^|\_){re.escape(i)}$", j))
+            or bool(re.search(f"^{re.escape(i)}($|\_)", j))
+        )
+        for j in js
+    ]
+
+
+if __name__ == "__main__":
+
+    parser = argparse.ArgumentParser("CT Model")
+    parser.add_argument("location", help="model directory")
+    parser.add_argument(
+        "filter", choices=["categories", "keywords", "no-filter"], help="filter type"
+    )
+    parser.add_argument("--values", nargs="+", default=[], help="filter allowed values")
+
+    parser.add_argument("--dataset", default="inspire-harvest/database")
+
+
+    # sample size
+    parser.add_argument("--samples", type=int, default=50000)
+    parser.add_argument("--constant-sampling", type=int, default=0)
+
+    # text pre-processing
+    parser.add_argument(
+        "--add-title", default=False, action="store_true", help="include title"
+    )
+    parser.add_argument(
+        "--remove-latex", default=False, action="store_true", help="remove latex"
+    )
+    parser.add_argument(
+        "--lemmatize", default=False, action="store_true", help="lemmatize"
+    )
+    parser.add_argument(
+        "--limit-redundancy",
+        default=False,
+        action="store_true",
+        help="limit redundancy",
+    )
+    parser.add_argument("--blacklist", default=None, help="blacklist")
+
+    # embeddings
+    parser.add_argument("--dimensions", type=int, default=50)
+    parser.add_argument("--pre-trained-embeddings", default=False, action="store_true")
+    parser.add_argument("--use-saved-embeddings", default=False, action="store_true")
+
+    # topic model parameters
+    parser.add_argument("--topics", type=int, default=25)
+    parser.add_argument("--min-df", type=float, default=0.001)
+    parser.add_argument("--max-df", type=float, default=0.15)
+
+    parser.add_argument("--threads", type=int, default=4)
+
+    args = parser.parse_args(
+        [
+            "output/acl_2002_2022",
+            "no-filter",
+            "--dataset",
+            "../acl",
+            "--constant-sampling",
+            "12000",
+            "--samples",
+            "300000",
+            "--threads",
+            "30",
+            "--add-title",
+            "--remove-latex",
+            "--dimensions",
+            "50",
+            "--topics",
+            "20",
+            "--min-df",
+            "0.00075",
+            "--lemmatize",
+            "--pre-trained-embeddings",
+            # "--limit-redundancy"
+            "--use-saved-embeddings"
+            # "--blacklist",
+            # "output/medialab/blacklist",
+        ]
+    )
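+    # Note: parse_args() is called with a hard-coded argument list above, so any command-line
+    # arguments passed to this script are ignored.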
+
+    with open(opj(args.location, "params.yml"), "w+") as fp:
+        yaml.dump(args, fp)
+
+    articles = pd.read_parquet(
+        opj(args.dataset, "articles.parquet")
+    )[["title", "abstract", "article_id", "date_created", "categories"]]
+
+    if args.add_title:
+        articles["abstract"] = articles["abstract"].str.cat(articles["title"], sep=". ")
+
+    articles.drop(columns=["title"], inplace=True)
+
+    if args.remove_latex:
+        articles["abstract"] = articles["abstract"].apply(
+            lambda s: re.sub("$[^>]+$", "", s)
+        )
+
+    articles["abstract"] = articles["abstract"].apply(
+        lambda s: re.sub(r"\b\\\w+", "", s)
+    )
+    articles["abstract"] = articles["abstract"].apply(
+        lambda s: re.sub("[^0-9a-zA-Z--- -\.]+", "", s)
+    )
+    # articles["abstract"] = articles["abstract"].str.replace("-", " ") # NEW
+
+    articles = articles[articles["abstract"].map(len) >= 100]
+    articles["abstract"] = articles["abstract"].str.lower()
+
+    articles = articles[articles["date_created"].str.len() >= 4]
+
+    if "year" not in articles.columns:
+        articles["year"] = articles["date_created"].str[:4].astype(int) - 2000
+        articles = articles[(articles["year"] >= 0) & (articles["year"] <= 40)]
+    else:
+        articles["year"] = articles["year"].astype(int)
+        articles = articles[articles["year"]>=2002]
+    
+    articles["year_group"] = articles["year"] // 5
+    keep = pd.Series([False] * len(articles), index=articles.index)
+
+    print("Applying filter...")
+    if args.filter == "keywords":
+        for value in args.values:
+            keep |= articles["abstract"].str.contains(value)
+    elif args.filter == "categories":
+        for value in args.values:
+            keep |= articles["categories"].apply(lambda l: value in l)
+    elif args.filter == "no-filter":
+        keep |= True
+
+    articles = articles[keep == True].sample(frac=1)
+
+    if args.constant_sampling > 0:
+        articles = articles.groupby("year").head(args.constant_sampling)
+
+    articles = articles.sample(frac=1).head(args.samples)
+
+    articles.reset_index(inplace=True)
+    articles[["article_id"]].to_csv(opj(args.location, "articles.csv"))
+    print(articles)
+
+    print("Extracting n-grams...")
+    extractor = TermExtractor(
+        articles["abstract"].tolist(),
+        limit_redundancy=args.limit_redundancy,
+        patterns=[
+            ["JJ.*"],
+            ["NN.*"],
+            ["JJ.*", "NN.*"],
+            ["JJ.*", "NN.*", "NN.*"],
+            # ["JJ.*", "NN", "CC", "NN.*"],
+            # ["JJ.*", "NN.*", "JJ.*", "NN.*"],
+            # ["RB.*", "JJ.*", "NN.*", "NN.*"],
+        ],
+    )
+
+    ngrams = extractor.ngrams(
+        threads=args.threads,
+        lemmatize=args.lemmatize,
+        lemmatize_ngrams=args.lemmatize,
+        split_sentences=args.pre_trained_embeddings and not args.use_saved_embeddings,
+    )
+
+    del extractor
+    del articles["abstract"]
+
+    if args.pre_trained_embeddings and not args.use_saved_embeddings:
+        ngrams = map(
+            lambda l: [
+                [
+                    ("_".join(n))
+                    .strip()
+                    .replace("-", "_")
+                    .replace(".._", "")
+                    .replace("_..", "")
+                    for n in sent
+                ]
+                for sent in l
+            ],
+            ngrams,
+        )
+        ngrams = list(ngrams)
+
+        print("Pre-training embeddings...")
+        emb = GensimWord2Vec(
+            [sentence for sentences in ngrams for sentence in sentences]
+        )
+        model = emb.create_model(
+            vector_size=args.dimensions,
+            window=5,
+            workers=args.threads,
+            compute_loss=True,
+            # epochs=90,
+            # min_count=30,
+            epochs=80,
+            min_count=15,
+            sg=1,
+            callbacks=[
+                MonitorCallback(
+                    [
+                        "transformer",
+                        "embedding",
+                        "syntax",
+                        "grammar"
+                    ]
+                )
+            ],
+        )
+        model.wv.save_word2vec_format(opj(args.location, "embeddings.bin"), binary=True)
+        del model
+
+        ngrams = [
+            list(itertools.chain.from_iterable(article_sentences))
+            for article_sentences in ngrams
+        ]
+    else:
+        ngrams = map(
+            lambda l: [
+                "_".join(n)
+                .strip()
+                .replace("-", "_")
+                .replace(".._", "")
+                .replace("_..", "")
+                for n in l
+            ],
+            ngrams,
+        )
+
+    ngrams = list(ngrams)
+
+    print("Deriving vocabulary...")
+    voc = defaultdict(int)
+
+    for article_ngrams in ngrams:
+        _ngrams = set(article_ngrams)
+        for ngram in _ngrams:
+            voc[ngram] += 1
+
+    voc = pd.DataFrame({"ngram": voc.keys(), "count": voc.values()})
+
+    voc["df"] = voc["count"] / len(articles)
+    voc.set_index("ngram", inplace=True)
+
+    if args.min_df < 1:
+        voc = voc[voc["df"] >= args.min_df]
+    else:
+        voc = voc[voc["count"] >= args.min_df]
+
+    if args.max_df < 1:
+        voc = voc[voc["df"] <= args.max_df]
+    else:
+        voc = voc[voc["count"] <= args.max_df]
+
+    voc["len"] = voc.index.map(len)
+    voc = voc[voc["len"] >= 2]
+
+    stop_words = nltk.corpus.stopwords.words("english")
+    voc = voc[~voc.index.isin(stop_words)]
+
+    if args.blacklist is not None:
+        print("Filtering black-listed keywords...")
+        blacklist = pd.read_csv(args.blacklist)["ngram"].tolist()
+        voc = voc[
+            voc.index.map(lambda s: not any([ngram in s for ngram in blacklist]))
+            == True
+        ]
+        print("Filtering completed.")
+
+    voc = voc.sort_values("df", ascending=False)
+    voc.to_csv(opj(args.location, "ngrams.csv"))
+
+    voc = pd.read_csv(opj(args.location, "ngrams.csv"), keep_default_na=False)[
+        "ngram"
+    ].tolist()
+
+    vocabulary = {n: i for i, n in enumerate(voc)}
+
+    print("Filtering n-grams...")
+    with mp.Pool(processes=args.threads) as pool:
+        ngrams = pool.map(partial(filter_ngrams, wl=voc), ngrams)
+
+    print("Constructing bag-of-words...")
+    bow = [[vocabulary[ngram] for ngram in _ngrams] for _ngrams in ngrams]
+
+    # if args.limit_redundancy:
+    #     print("Building 'within' matrix...")
+    #     with mp.Pool(processes=args.threads) as pool:
+    #         within = pool.map(partial(ngram_inclusion, js=voc), voc)
+
+    #     within = np.array(within).astype(int)
+
+    #     print("Removing double-counting...")
+    #     bow = csr_matrix(bow)
+    #     double_counting = bow.dot(csr_matrix(within.T))
+    #     bow = bow - double_counting
+    #     print(double_counting.sum(), "redundant keywords removed")
+    #     del double_counting
+    #     bow = bow.todense()
+    #     print((bow <= -1).sum(), "keywords had negative counts after removal")
+
+    del ngrams
+
+    with mp.Pool(processes=args.threads) as pool:
+        bow = pool.map(partial(construct_bow, n=len(voc)), bow)
+
+    keep = [i for i in range(len(bow)) if len(bow[i][0]) > 0]
+
+    articles = articles.iloc[keep]
+    articles[["article_id"]].to_csv(opj(args.location, "articles.csv"))
+
+    bow = [bow[i] for i in keep]
+
+    dataset = {
+        "tokens": [bow[i][0] for i in range(len(bow))],
+        "counts": [bow[i][1] for i in range(len(bow))],
+        "article_id": articles["article_id"],
+    }
+
+    del bow
+
+    with open(opj(args.location, "dataset.pickle"), "wb") as handle:
+        pickle.dump(dataset, handle, protocol=pickle.HIGHEST_PROTOCOL)
+
+    print("Training...")
+    from embedded_topic_model.models.etm import ETM
+
+    etm_instance = ETM(
+        voc,
+        num_topics=args.topics,
+        rho_size=args.dimensions,
+        emb_size=args.dimensions,
+        epochs=25,
+        debug_mode=True,
+        train_embeddings=not args.pre_trained_embeddings,
+        model_path=opj(args.location, "model"),
+        embeddings=opj(args.location, "embeddings.bin")
+        if args.pre_trained_embeddings
+        else None,
+        use_c_format_w2vec=True,
+    )
+
+    etm_instance.fit(dataset)
+
+    with open(opj(args.location, "etm_instance.pickle"), "wb") as handle:
+        pickle.dump(etm_instance, handle, protocol=pickle.HIGHEST_PROTOCOL)
+
+    topics = etm_instance.get_topics(20)
+    print(topics)
+    topic_coherence = etm_instance.get_topic_coherence()
+    print(topic_coherence)
+    topic_diversity = etm_instance.get_topic_diversity()
+    print(topic_diversity)
+
+

+ 103 - 0
code/etm_compile.py

@@ -0,0 +1,103 @@
+import pandas as pd
+import pickle
+import numpy as np
+from scipy.stats import entropy
+
+from os.path import join as opj
+
+import argparse
+
+from tqdm import trange
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--input")
+parser.add_argument("--dataset", default="inspire-harvest/database")
+
+parser.add_argument("--write-topics", action="store_true", default=False)
+parser.add_argument("--debug", action="store_true", default=False)
+args = parser.parse_args()
+
+ngrams = pd.read_csv(opj(args.input, "ngrams.csv"))
+
+with open(opj(args.input, "dataset.pickle"), "rb") as handle:
+    data = pickle.load(handle)
+
+with open(opj(args.input, "etm_instance.pickle"), "rb") as handle:
+    etm_instance = pickle.load(handle)
+
+articles = pd.read_parquet(opj(args.dataset, "articles.parquet"))[["article_id", "date_created"]]
+articles = articles[articles["date_created"].str.len() >= 4]
+if "year" not in articles.columns:
+    articles["year"] = articles["date_created"].str[:4].astype(int)-2000
+
+articles["article_id"] = articles.article_id.astype(int)
+_articles = pd.read_csv(opj(args.input, "articles.csv"))
+articles = _articles.merge(articles, how="inner")
+years = articles["year"].values
+
+if args.write_topics:
+
+    top_words = [",".join(l) for l in etm_instance.get_topics(20)]
+
+    topics = pd.DataFrame({"top_words": top_words})
+    topics["label"] = ""
+    topics.to_csv(opj(args.input, "topics.csv"))
+
+topics = pd.read_csv(opj(args.input, "topics.csv"))
+theta = etm_instance.get_document_topic_dist()
+
+is_junk_topic = np.array(topics["label"].str.contains("Junk"))
+
+if args.debug:
+    import seaborn as sns
+    from matplotlib import pyplot as plt
+
+    sns.heatmap(
+        np.corrcoef(theta, theta, rowvar=False), vmin=-0.5, vmax=0.5, cmap="RdBu"
+    )
+    plt.show()
+
+topic_counts = np.zeros((theta.shape[0], theta.shape[1]))
+p_w_z = etm_instance.get_topic_word_dist()
+print("Computing P(w|d) matrix...")
+p_w_d = theta @ p_w_z
+
+keywords = np.zeros((articles.year.max()+1, p_w_z.shape[1], p_w_z.shape[0]))
+
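+# Assign each token of each document to a single topic via Bayes' rule, p(z|w,d) ~ p(w|z) * theta(d,z);
+# tokens whose posterior spreads over two or more effective topics (exp of its entropy >= 2) are
+# treated as ambiguous and skipped, as are tokens whose best topic is labelled "Junk".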
+for d in trange(theta.shape[0]):
+    for i, w in enumerate(data["tokens"][d]):
+        p = p_w_z[:, w] * theta[d, :] / p_w_d[d, w]
+        S = np.exp(entropy(p))
+        if S >= 2:
+            if args.debug:
+                word = ngrams.iloc[w]["ngram"]
+                print(f"{word} is ambiguous, entropy={S:.2f}")
+            continue
+        else:
+            k = np.argmax(p)
+            if is_junk_topic[k]:
+                continue
+
+            topic_counts[d, k] += data["counts"][d][i]
+            if args.debug:
+                word = ngrams.iloc[w]["ngram"]
+                print(word, topics.iloc[k]["label"], data["counts"][d][i])
+
+    n_words = topic_counts[d,:].sum()
+
+    if args.debug:
+        print(n_words)
+    
+    if n_words == 0:
+        continue
+
+    for i, w in enumerate(data["tokens"][d]):
+        keywords[years[d],w,:] += topic_counts[d,:]
+    
+
+print(topic_counts)
+print(topic_counts.mean(axis=0))
+print(topic_counts.sum(axis=0))
+
+np.save(opj(args.input, "keywords.npy"), keywords)
+np.save(opj(args.input, "topics_counts.npy"), topic_counts)
+

+ 85 - 0
code/etm_ei.py

@@ -0,0 +1,85 @@
+import pandas as pd
+import numpy as np
+
+from cmdstanpy import CmdStanModel
+
+import argparse
+import pickle
+
+from os.path import join as opj
+
+import seaborn as sns
+from matplotlib import pyplot as plt
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--input")
+parser.add_argument("--n", type=int, default=200)
+parser.add_argument("--min-pop", type=int, default=100)
+parser.add_argument("--stack-rows", type=int, default=1)
+
+parser.add_argument("--chains", type=int, default=1)
+parser.add_argument("--threads-per-chain", type=int, default=4)
+parser.add_argument("--samples", type=int, default=500)
+parser.add_argument("--warmup", type=int, default=1000)
+
+args = parser.parse_args()
+
+n_topics = len(pd.read_csv(opj(args.input, "topics.csv")))
+
+df = pd.read_csv(opj(args.input, "aggregate.csv"))
+df = df[df[[f"start_{k+1}" for k in range(n_topics)]].sum(axis=1) >= args.min_pop]
+df = df.sample(n=args.n)
+
+resources = pd.read_parquet(opj(args.input, "pooled_resources.parquet"))
+df = df.merge(resources, left_on="bai", right_on="bai")
+
+data = {
+    "NR": np.stack(df[[f"start_{k+1}" for k in range(n_topics)]].values).astype(int),
+    "NC": np.stack(df[[f"end_{k+1}" for k in range(n_topics)]].values).astype(int),
+    "R": n_topics,
+    "C": n_topics,
+    "n_units": len(df),
+    "threads": args.threads_per_chain
+}
+
+data["cov"] = np.stack(df["pooled_resources"])
+
+junk = np.sum(data["NR"] + data["NC"], axis=0) == 0
+
+for col in ["NR", "NC", "cov"]:
+    data[col] = data[col][:, ~junk]
+
+data["R"] -= junk.sum()
+data["C"] -= junk.sum()
+
+data["cov"] = data["cov"] / np.maximum(data["cov"].sum(axis=1)[:, np.newaxis], 1)
+sns.heatmap(
+    np.corrcoef(data["NC"].T, data["cov"].T), vmin=-0.5, vmax=0.5, cmap="RdBu"
+)
+plt.show()
+
+
+print(data["cov"].shape)
+
+
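+# Compile the ecological-inference Stan model with within-chain threading enabled, then sample.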
+model = CmdStanModel(
+    stan_file=f"code/ei_cov_softmax_control.stan",
+    cpp_options={"STAN_THREADS": "TRUE"},
+    compile="force",
+)
+
+fit = model.sample(
+    data=data,
+    chains=args.chains,
+    threads_per_chain=args.threads_per_chain,
+    iter_sampling=args.samples,
+    iter_warmup=args.warmup,
+)
+
+vars = fit.stan_variables()
+samples = {}
+for (k, v) in vars.items():
+    samples[k] = v
+
+np.savez_compressed(opj(args.input, "ei_samples.npz"), **samples)

+ 169 - 0
code/etm_transfers.py

@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+import pandas as pd 
+import numpy as np
+
+import networkx as nx 
+from ipysigma import Sigma
+
+from matplotlib import pyplot as plt
+import seaborn as sns
+
+import pickle
+
+from os.path import join as opj
+
+import argparse
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--location")
+parser.add_argument("--dataset", default="inspire-harvest/database")
+parser.add_argument("--keywords-threshold", type=int, default=200)
+parser.add_argument("--articles-threshold", type=int, default=5)
+parser.add_argument("--late-periods", nargs="+", type=int, default=[3]) # [2,3] for ACL, [3] for HEP
+args = parser.parse_args()
+
+topics = pd.read_csv(opj(args.location, "topics.csv"))["label"].tolist()
+topic_matrix = np.load(opj(args.location, "topics_counts.npy"))
+
+articles = pd.read_parquet(opj(args.dataset, "articles.parquet"))[["article_id", "date_created", "title"]]
+articles = articles[articles["date_created"].str.len() >= 4]
+if "years" not in articles.columns:
+    articles["year"] = articles["date_created"].str[:4].astype(int)-2000
+else:
+    articles["year"] = articles["year"].astype(int)-2002
+
+articles = articles[(articles["year"] >= 0) & (articles["year"] <= 40)]
+articles["year_group"] = articles["year"]//5
+_articles = pd.read_csv(opj(args.location,"articles.csv"))
+articles["article_id"] = articles.article_id.astype(int)
+articles = _articles.merge(articles, how="left")
+print(len(_articles))
+print(len(articles))
+
+articles["main_topic"] = topic_matrix.argmax(axis=1)
+articles["main_topic"] = articles["main_topic"].map(lambda k: topics[k])
+
+print(articles[["title", "main_topic"]].sample(frac=1).head(10))
+print(articles[["title", "main_topic"]].sample(frac=1).head(10))
+
+all_authors = pd.read_parquet(opj(args.dataset, "articles_authors.parquet"))
+all_authors["article_id"] = all_authors.article_id.astype(int)
+n_authors = all_authors.groupby("article_id").agg(n_authors=("bai", lambda x: x.nunique())).reset_index()
+
+n_articles = len(articles)
+articles = articles.merge(n_authors, how="left", left_on="article_id", right_on="article_id")
+assert len(articles)==n_articles, "# of articles does not match! cannot continue"
+
+all_authors = all_authors.merge(articles, how="inner", left_on="article_id", right_on="article_id")
+all_authors["year_range"] = all_authors["year"]//5
+
+n_papers = all_authors.groupby(["bai", "year_range"]).agg(n=("article_id", "count")).reset_index()
+filtered_authors = []
+for author, n in n_papers.groupby("bai"):
+    start = n[n["year_range"]<=1]
+    # end = n[n["year_range"]==3]
+    end = n[n["year_range"].isin(args.late_periods)]
+    if len(start) and len(end):
+        filtered_authors.append({
+            "author": author,
+            "n_start": start.iloc[0]["n"],
+            "n_end": end.iloc[0]["n"],
+        })
+
+filtered_authors = pd.DataFrame(filtered_authors)
+filtered_authors = filtered_authors[(filtered_authors["n_start"] >= args.articles_threshold) & (filtered_authors["n_end"] >= args.articles_threshold)]
+authors=all_authors[all_authors["bai"].isin(filtered_authors["author"])]
+start_authors = authors[authors["year_range"]<=1]
+# end_authors = authors[authors["year_range"]==3]
+end_authors = authors[authors["year_range"].isin(args.late_periods)]
+
+authorlist = list(authors["bai"].unique())
+inv_articles = {n: i for i,n in enumerate(articles["article_id"].values)}
+inv_authorlist = {author: i for i, author in enumerate(authorlist)}
+n_authors = len(authorlist)
+
+n_clusters = topic_matrix.shape[1]
+n_years = articles["year"].max()+1
+
+start = np.zeros((n_authors, n_clusters))
+end = np.zeros((n_authors, n_clusters))
+expertise = np.zeros((n_authors, n_clusters))
+
+start_count = np.zeros(n_authors)
+end_count = np.zeros(n_authors)
+expertise_norm = np.zeros(n_authors)
+
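+# Accumulate early-period topic counts per author; for the expertise measure, each paper's counts
+# are weighted by 1/n_authors (fractional authorship credit).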
+for author, _articles in start_authors.groupby("bai"):
+    for article_id in _articles["article_id"].tolist():
+        start[inv_authorlist[author],:] += topic_matrix[inv_articles[article_id],:].flat
+        start_count[inv_authorlist[author]] += topic_matrix[inv_articles[article_id],:].sum()
+
+        n = articles.iloc[inv_articles[article_id]]["n_authors"]
+        expertise[inv_authorlist[author]] += (1/n)*topic_matrix[inv_articles[article_id],:].flat
+        expertise_norm[inv_authorlist[author]] += (1/n)*topic_matrix[inv_articles[article_id],:].sum()
+        
+for author, _articles in end_authors.groupby("bai"):
+    for article_id in _articles["article_id"].tolist():
+        end[inv_authorlist[author],:] += topic_matrix[inv_articles[article_id],:].flat
+        end_count[inv_authorlist[author]] += topic_matrix[inv_articles[article_id],:].sum()
+
+authors_records = {}
+
+for author, _articles in all_authors.groupby("bai"):
+    record = np.zeros((n_years, n_clusters))
+    record_count = np.zeros((n_years, n_clusters))
+
+    for article in _articles.to_dict(orient="records"):
+        year = article["year"]
+        article_id = article["article_id"]
+        
+        record[year,:] += topic_matrix[inv_articles[article_id],:].flat
+        record_count[year] += topic_matrix[inv_articles[article_id],:].sum()
+
+    authors_records[author] = {
+        "record": record,
+        "record_count": record_count
+    }
+
+with open(opj(args.location, "authors_full_records.pickle"), "wb") as handle:
+    pickle.dump(authors_records, handle, protocol=pickle.HIGHEST_PROTOCOL)
+
+ok = (start_count>=args.keywords_threshold)&(end_count>=args.keywords_threshold)
+
+cluster_names_start = [f"start_{n+1}" for n in range(n_clusters)]
+cluster_names_end = [f"end_{n+1}" for n in range(n_clusters)]
+cluster_names_expertise = [f"expertise_{n+1}" for n in range(n_clusters)]
+
+start = start[ok]
+end = end[ok]
+start_count = start_count[ok]
+end_count = end_count[ok]
+expertise = expertise[ok]/expertise_norm[ok][:,np.newaxis]
+
+start_norm = (start/start_count[:,np.newaxis])
+end_norm = (end/end_count[:,np.newaxis])
+
+
+print(start_norm.shape)
+print(end_norm.shape)
+
+print(start_norm.mean(axis=0))
+print(end_norm.mean(axis=0))
+
+aggregate = {}
+for i in range(n_clusters):
+    aggregate[cluster_names_start[i]] = start[:,i]
+    aggregate[cluster_names_end[i]] = end[:,i]
+    aggregate[cluster_names_expertise[i]] = expertise[:,i]
+
+aggregate = pd.DataFrame(aggregate)
+aggregate["bai"] = [bai for i, bai in enumerate(authorlist) if ok[i]]
+
+aggregate.to_csv(opj(args.location, "aggregate.csv"))
+
+sns.heatmap(np.corrcoef(start_norm.T, end_norm.T), vmin=-0.5, vmax=0.5, cmap="RdBu")
+plt.show()
+
+

+ 89 - 0
code/exited.stan

@@ -0,0 +1,89 @@
+functions {
+    vector z_scale(vector x) {
+        return (x-mean(x))/sd(x);
+    }
+}
+
+data {
+    int<lower=1> N;
+    int<lower=1> K;
+    vector<lower=0>[N] soc_cap;
+    vector<lower=0>[N] soc_div;
+    vector<lower=0>[N] int_div;
+    vector[N] res_soc_div;
+    //vector<lower=0>[N] age;
+    array[N] int<lower=0,upper=1> m;
+    matrix<lower=0,upper=1>[N,K] x;
+    vector[N] stable;
+    array [N] int<lower=0,upper=K-1> primary_research_area;
+}
+
+transformed data {
+    //vector[N] z_m = z_scale(m);
+    vector[N] z_soc_cap = z_scale(soc_cap);
+    vector[N] z_soc_div = z_scale(soc_div);
+    vector[N] z_int_div = z_scale(int_div);
+    vector[N] z_res_soc_div = z_scale(res_soc_div);
+
+}
+
+parameters {
+    real beta_soc_cap;
+    real beta_soc_div;
+    real beta_int_div;
+    real beta_stable;
+    vector[K] beta_x;
+
+    real mu;
+    real<lower=0> tau;
+    real<lower=1> sigma;
+
+    // vector[K] mu_x;
+    // vector<lower=0>[K] sigma_x;
+    // real mu_pop;
+    // real<lower=0> sigma_pop;
+}
+
+model {
+    vector[N] beta_research_area;
+    for (k in 1:N) {
+        beta_research_area[k] = tau*beta_x[primary_research_area[k]+1];
+    }
+
+    beta_soc_cap ~ normal(0, 1);
+    beta_soc_div ~ normal(0, 1);
+    beta_int_div ~ normal(0, 1);
+    beta_x ~ double_exponential(0, 1);
+    beta_stable ~ normal(0, 1);
+
+    mu ~ normal(0, 1);
+    tau ~ exponential(1);
+    sigma ~ pareto(1,1.5);
+
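+    // Same specification as entered.stan; per the file name, m here presumably flags exit rather than entry.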
+    m ~ bernoulli_logit(beta_soc_cap*z_soc_cap + beta_soc_div*z_res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu);
+
+    // m ~ beta(mu_pop, sigma_pop);
+    // for (k in 1:N) {
+    //    m[k] ~ normal(mu_x[primary_research_area[k]+1], sigma_x[primary_research_area[k]+1]);
+    // }
+    // mu_x ~ normal(0, 1);
+    // mu_pop ~ normal(0, 1);
+    // sigma_x ~ exponential(1);
+    // sigma_pop ~ exponential(1);
+}
+
+generated quantities {
+    // real R2 = 0;
+    // {
+    //     vector[N] beta_research_area;
+    //     for (k in 1:N) {
+    //         beta_research_area[k] = beta_x[primary_research_area[k]+1];
+    //     }
+    //     vector[N] pred = inv_logit(beta_soc_cap*z_soc_cap + beta_soc_div*z_res_soc_div + beta_int_div*z_int_div + beta_stable*stable + beta_research_area + mu);
+
+    //     R2 = mean(square(z_m-pred));
+    //     R2 = 1-R2;
+    // }
+    
+}
+

+ 421 - 0
code/optimal_transport.py

@@ -0,0 +1,421 @@
+import numpy as np
+import pandas as pd
+from scipy.stats import norm
+from scipy.special import softmax
+import cvxpy as cp
+import ot
+from sklearn.linear_model import LinearRegression
+from scipy.linalg import logm
+
+from matplotlib import pyplot as plt
+import matplotlib
+matplotlib.use("pgf")
+matplotlib.rcParams.update(
+    {
+        "pgf.texsystem": "xelatex",
+        "font.family": "serif",
+        "font.serif": "Times New Roman",
+        "text.usetex": True,
+        "pgf.rcfonts": False,
+        'mathtext.default': 'regular',
+    }
+)
+plt.rcParams["text.latex.preamble"].join([
+        r"\usepackage{amsmath}",              
+        r"\usepackage{bm}",              
+        r"\setmainfont{amssymb}",
+])
+import seaborn as sns
+
+import argparse
+from os.path import join as opj, exists
+import pickle
+
+from cmdstanpy import CmdStanModel
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--input")
+parser.add_argument("--suffix", default=None)
+parser.add_argument("--model", default="knowledge", choices=["knowledge", "identity", "random", "etm", "linguistic", "linguistic_symmetric"])
+parser.add_argument("--prior", default="bounded", choices=["bounded"])
+parser.add_argument("--steps", default=1000000, type=int)
+parser.add_argument("--burnin", default=50000, type=int)
+parser.add_argument("--thin", default=100, type=int)
+parser.add_argument("--alpha-prior", default=5, type=float)
+args = parser.parse_args()
+
+suffix = f"_{args.suffix}" if args.suffix is not None else ""
+
+samples = np.load(opj(args.input, f"ei_samples{suffix}.npz"))
+topics = pd.read_csv(opj(args.input, "topics.csv"))
+junk = topics["label"].str.contains("Junk")
+topics = topics[~junk]["label"].tolist()
+
+
+n_topics = len(pd.read_csv(opj(args.input, "topics.csv")))
+df = pd.read_csv(opj(args.input, "aggregate.csv"))
+
+resources = pd.read_parquet(opj(args.input, "pooled_resources.parquet"))
+df = df.merge(resources, left_on="bai", right_on="bai")
+
+NR = np.stack(df[[f"start_{k+1}" for k in range(n_topics)]].values).astype(int)
+NC = np.stack(df[[f"end_{k+1}" for k in range(n_topics)]].values).astype(int)
+expertise = np.stack(df[[f"expertise_{k+1}" for k in range(n_topics)]].values)
+S = np.stack(df["pooled_resources"])
+
+# junk = np.sum(NR + NC, axis=0) == 0
+
+N = NR.shape[0]
+NR = NR[:,~junk]
+NC = NC[:,~junk]
+expertise = expertise[:,~junk]
+S = S[:,~junk]
+
+x = NR/NR.sum(axis=1)[:,np.newaxis]
+y = NC/NC.sum(axis=1)[:,np.newaxis]
+S_distrib = S/S.sum(axis=1)[:,np.newaxis]
+print(S_distrib)
+
+
+R = np.array([
+    [((expertise[:,i]>expertise[:,i].mean())&(expertise[:,j]>expertise[:,j].mean())).mean()/(expertise[:,i]>expertise[:,i].mean()).mean() for j in range(len(topics))]
+    for i in range(len(topics))
+])
+
+K = expertise.shape[1]
+
+# observed couplings
+theta = samples["beta"].mean(axis=0)
+theta = np.einsum("ai,aij->ij", x, theta)
+
+order = np.load(opj(args.input, "topics_order.npy"))
+
+def mcmc_bounded(T, x, alpha_prior, sigma, steps=1000):
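+    # Random-walk Metropolis over cost matrices consistent with the observed couplings T:
+    # the initial C = -log(K)/lambda is obtained by rescaling the columns of T (convex program)
+    # so that C is non-negative and sums to one; proposals rescale rows/columns of K, preserving
+    # the couplings' cross-ratios, and are accepted under a prior that concentrates C around
+    # softmax(beta * x), with beta ~ Normal(0, 1).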
+    # x = x/x.std()
+    T = T/T.sum()
+    m = T.shape[0]
+    n = T.shape[1]
+
+    K = np.zeros((steps+1, m, n))
+
+    lambd = m*n*3
+    # Transform K in a way that sends C to the prior support
+    # while preserving cross-ratios
+    Dc = cp.Variable(m)
+    prob = cp.Problem(
+        cp.Minimize(cp.sum(cp.abs(Dc))),
+        [
+            m*cp.sum(Dc)==-np.sum(np.log(T))-lambd, # C sums to m*n*lambda
+            Dc <= -np.log(np.max(T, axis=0)) # C is positive
+        ]
+    )
+    prob.solve(verbose=True)
+
+    K[0] = T@np.diag(np.exp(Dc.value))
+    beta = np.random.randn(steps+1)
+
+    C = np.zeros((steps+1, m, n))
+    C[0] = -np.log(K[0])/lambd
+    
+    accepted = np.array([False]*(steps+1))
+    accepted[0] = True
+
+    oob = np.array([False]*(steps+1))
+
+    beta_prior = norm(loc=0,scale=1)
+    
+    for i in range(steps):
+        Dr = np.random.randn(m)*sigma
+        Dr = np.exp(Dr)
+        Dc = 1/Dr # preserve the sum of C
+
+        beta[i+1] = np.random.randn()*sigma+beta[i]
+        K[i+1] = np.diag(Dr)@K[i]@np.diag(Dc)
+        C[i+1] = -np.log(K[i+1])/lambd
+        
+        distrib_prop = softmax(x.flatten()*beta[i+1])
+        distrib_prev = softmax(x.flatten()*beta[i])
+
+        oob[i+1] = np.abs(C[i+1].sum()-1)>1e-6 or np.any(C[i+1]<0)
+
+        if not oob[i+1]:
+            p_prop = beta_prior.logpdf(beta[i+1])
+            p_prev = beta_prior.logpdf(beta[i])
+
+            p_prop += -alpha_prior*(C[i+1].flatten()*np.log(C[i+1].flatten()/distrib_prop)).sum() - 0.5*np.log(C[i+1].flatten()).sum()
+            p_prev += -alpha_prior*(C[i].flatten()*np.log(C[i].flatten()/distrib_prev)).sum() - 0.5*np.log(C[i].flatten()).sum()
+
+            a = p_prop-p_prev
+            u = np.random.uniform(0, 1)
+
+        if oob[i+1] or a <= np.log(u):
+            C[i+1] = C[i]
+            K[i+1] = K[i]
+            beta[i+1] = beta[i]
+            accepted[i+1] = False
+        else:
+            accepted[i+1] = True
+
+        if i % 1000 == 0:
+            print(f"step {i}/{steps}, rate={accepted[:i].mean():.3f}, oob={oob[:i].mean():.3f}, acc={accepted[:i].sum():.0f}")
+            print(f"beta: {beta[:i].mean():.2f}, beta batch: {beta[i-1000:i].mean():.2f}, std batch: {beta[i-1000:i].std():.2f}")
+
+    return C, beta, accepted
+
+
+output = opj(args.input, f"cost_{args.model}_{args.prior}.npz")
+
+if args.model == "knowledge":
+    matrix = 1-np.load(opj(args.input, "nu_expertise.npy"))
+elif args.model == "etm":
+    matrix = 1-np.load(opj(args.input, "nu_etm.npy"))
+elif args.model == "identity":
+    matrix = 1-np.eye(K)
+elif args.model == "random":
+    matrix = np.random.uniform(0, 1, size=(K,K))
+elif args.model == "linguistic":
+    matrix = np.load(opj(args.input, "nu_ling.npy"))
+elif args.model == "linguistic_symmetric":
+    matrix = np.load(opj(args.input, "nu_ling_symmetric.npy"))
+
+    fig, ax = plt.subplots()
+    sns.heatmap(
+        matrix[:, order][order],
+        cmap="Reds",
+        vmin=0,
+        vmax=+np.max(np.abs(matrix)),
+        xticklabels=[topics[i] for i in order],
+        yticklabels=[topics[i] for i in order],
+        ax=ax,
+    )
+    fig.savefig(opj(args.input, f"linguistic_gap_{args.model}_{args.prior}.eps"), bbox_inches="tight")
+
+matrix_sd = 1
+
+if not exists(output):
+    if args.model in ["knowledge", "etm"]:
+        C, beta, accepted = mcmc_bounded(theta, matrix, args.alpha_prior*K*K, 0.1, steps=args.steps)
+    else:
+        C, beta, accepted = mcmc_bounded(theta, matrix/matrix.std(), args.alpha_prior*K*K, 0.1, steps=args.steps)
+
+    C = C[args.burnin::args.thin]
+    beta = beta[args.burnin::args.thin]
+    accepted = accepted[args.burnin::args.thin]
+    np.savez_compressed(output, C=C, beta=beta)
+
+else:
+    samples = np.load(output)
+    C = samples["C"]
+    beta = samples["beta"]
+
+print(beta.mean())
+print(beta.std())
+
+res = C-np.einsum("s,ij->sij", beta, matrix/matrix_sd)
+delta = res.mean(axis=0)
+res = (res**2).mean(axis=(1,2))
+var = np.array([C[s].flatten().var() for s in range(C.shape[0])])
+res = res/var 
+res = 1-res
+print(res.mean())
+
+fig, ax = plt.subplots()
+sns.heatmap(
+    C.mean(axis=0)[:, order][order],
+    xticklabels=[topics[i] for i in order],
+    yticklabels=[topics[i] for i in order],
+    cmap="Reds",
+    vmin=+np.min(C.mean(axis=0)),
+    vmax=+np.max(C.mean(axis=0)),
+    ax=ax,
+)
+fig.savefig(opj(args.input, f"cost_matrix_{args.model}_{args.prior}.eps"), bbox_inches="tight")
+
+pearson = np.corrcoef(C.mean(axis=0).flatten(), matrix.flatten())[0,1]
+print("R:", pearson)
+print("R^2:", pearson**2)
+
+reg = LinearRegression()
+fit = reg.fit(matrix.flatten().reshape(-1, 1),C.mean(axis=0).flatten())
+
+if args.model == "knowledge":
+    fig, ax = plt.subplots(figsize=(0.75*4.8,0.75*3.2))
+    xs = np.linspace(0, 1, 4)
+    ax.plot(1-xs, fit.predict(xs.reshape(-1, 1)), color="black")
+    ax.scatter(1-matrix.flatten(), C.mean(axis=0).flatten(), s=4)
+    
+    # error bars are boring as they only reflect the degeneracy of the cost matrix
+    # low = np.quantile(C, q=0.05/2, axis=0)
+    # up = np.quantile(C, q=1-0.05/2, axis=0)
+    # mean = C.mean(axis=0)
+    # ax.errorbar(
+    #     1-matrix.flatten(),
+    #     mean.flatten(), 
+    #     (np.maximum(mean.flatten()-low.flatten(), 0), np.maximum(up.flatten()-mean.flatten(), 0)),
+    #     ls="none",
+    #     lw=0.5
+    # )
+    ax.set_xlabel("Fraction of physicists with expertise in $k'$\namong those with expertise in $k$ ($\\nu_{k,k'}$)")
+    # pearson = np.corrcoef(softmax(np.einsum("s,i->si", beta, (1-matrix.flatten())/matrix.std()), axis=1).mean(axis=0), C.mean(axis=0).flatten())[0,1]
+    ax.text(0.95, 0.95, f"$R={-pearson:.2f}$", ha="right", va="top", transform=ax.transAxes)
+    ax.set_ylabel("Cost of shifting attention\nfrom $k$ to $k'$ ($C_{k,k'}$)")
+    fig.savefig(opj(args.input, f"cost_vs_nu_{args.model}.eps"), bbox_inches="tight")
+elif args.model == "identity":
+    fig, ax = plt.subplots(figsize=(0.75*4.8,0.75*3.2))
+    ax.axline((0,0), slope=-beta.mean(axis=0)/matrix_sd, color="black")
+    ax.scatter((1-matrix).flatten(), C.mean(axis=0).flatten(), s=4)
+    ax.set_xlabel("1 if $k=k'$, 0 otherwise")
+    ax.text(0.95, 0.95, f"$R={-pearson:.2f}$", ha="right", va="top", transform=ax.transAxes)
+    ax.set_ylabel("Cost of shifting attention\nfrom $k$ to $k'$ ($C_{k,k'}$)")
+    fig.savefig(opj(args.input, f"cost_vs_nu_{args.model}.eps"), bbox_inches="tight")
+elif args.model == "linguistic":
+    fig, ax = plt.subplots(figsize=(0.75*4.8,0.75*3.2))
+    ax.axline((0,0), slope=beta.mean(axis=0)/matrix_sd, color="black")
+    ax.scatter(matrix.flatten(), C.mean(axis=0).flatten(), s=4)
+    ax.set_xlabel("Linguistic gap from $k$ to $k'$\n$\\Delta_{k,k'}=H(\\varphi_{k'}+\\varphi_k)-H(\\varphi_k)$")
+    ax.text(0.05, 0.95, f"$R={pearson:.2f}$", ha="left", va="top", transform=ax.transAxes)
+    ax.set_ylabel("Cost of shifting attention\nfrom $k$ to $k'$ ($C_{k,k'}$)")
+    fig.savefig(opj(args.input, f"cost_vs_nu_{args.model}.eps"), bbox_inches="tight")
+elif args.model == "linguistic_symmetric":
+    fig, ax = plt.subplots(figsize=(0.75*4.8,0.75*3.2))        
+    ax.scatter(matrix.flatten(), C.mean(axis=0).flatten(), s=4)
+    ax.set_xlabel("Linguistic gap from $k$ to $k'$\n$\\Delta_{k,k'}=H(\\varphi_{k'}+\\varphi_k)-H(\\varphi_k)$")
+    pearson = np.corrcoef(softmax(np.einsum("s,i->si", beta, matrix.flatten()/matrix.std()), axis=1).mean(axis=0), C.mean(axis=0).flatten())[0,1]
+    ax.text(0.05, 0.95, f"$R={pearson:.2f}$", ha="left", va="top", transform=ax.transAxes)
+    ax.set_ylabel("Cost of shifting attention\nfrom $k$ to $k'$ ($C_{k,k'}$)")
+    fig.savefig(opj(args.input, f"cost_vs_nu_{args.model}.eps"), bbox_inches="tight")
+
+# predicted transfers
+origin = x.mean(axis=0)
+target = y.mean(axis=0)
+
+fig, ax = plt.subplots()
+shifts = theta[:, order][order]/theta.sum()
+sig = shifts>origin[order]*target[order]
+shifts = shifts/shifts.sum(axis=1)[:,np.newaxis]
+sns.heatmap(
+    shifts,
+    xticklabels=[topics[i] for i in order],
+    yticklabels=[topics[i] for i in order],
+    cmap="Blues",
+    vmin=0,
+    ax=ax,
+    annot=[[f"\\textbf{{{shifts[i,j]:.2f}}}" if sig[i,j] else "" for j in range(len(topics))] for i in range(len(topics))],
+    fmt="",
+    annot_kws={"fontsize": 6},
+)
+fig.savefig(opj(args.input, f"cost_matrix_true_couplings_{args.model}_{args.prior}.eps"), bbox_inches="tight")
+
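+# Predicted couplings: entropic optimal transport (Sinkhorn) between the average start and end
+# topic distributions, with the posterior mean of softmax(beta * matrix) supplied as the cost
+# and entropic regularization 1/(3*K*K).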
+T = ot.sinkhorn(
+    origin,
+    target,
+    softmax(np.einsum("s,i->si", beta, matrix.flatten() if args.model in ["knowledge", "etm"] else matrix.flatten()/matrix.std()), axis=1).reshape((len(beta), K, K)).mean(axis=0),
+    1/(3*K*K)
+)
+
+shifts = T[:, order][order]
+sig = shifts>origin[order]*target[order]
+shifts = shifts/shifts.sum(axis=1)[:,np.newaxis]
+
+fig, ax = plt.subplots()
+sns.heatmap(
+    shifts,
+    xticklabels=[topics[i] for i in order],
+    yticklabels=[topics[i] for i in order],
+    cmap="Blues",
+    vmin=0,
+    ax=ax,
+    annot=[[f"\\textbf{{{shifts[i,j]:.2f}}}" if sig[i,j] else "" for j in range(len(topics))] for i in range(len(topics))],
+    fmt="",
+    annot_kws={"fontsize": 6},
+)
+fig.savefig(opj(args.input, f"cost_matrix_predicted_couplings_{args.model}_{args.prior}.eps"), bbox_inches="tight")
+
+
+T_baseline = ot.sinkhorn(
+    origin,
+    target,
+    (1-np.identity(K))/K,
+    50/(10*K*K)
+)
+
+
+shifts = T_baseline[:, order][order]
+sig = shifts>origin[order]*target[order]
+shifts = shifts/shifts.sum(axis=1)[:,np.newaxis]
+
+fig, ax = plt.subplots()
+sns.heatmap(
+    shifts,
+    xticklabels=[topics[i] for i in order],
+    yticklabels=[topics[i] for i in order],
+    cmap="Blues",
+    vmin=0,
+    ax=ax,
+    annot=[[f"\\textbf{{{shifts[i,j]:.2f}}}" if sig[i,j] else "" for j in range(len(topics))] for i in range(len(topics))],
+    fmt="",
+    annot_kws={"fontsize": 6},
+)
+fig.savefig(opj(args.input, f"cost_matrix_predicted_couplings_identity.eps"), bbox_inches="tight")
+
+def tv_dist(x, y):
+    return  np.abs(y/y.sum()-x/x.sum()).sum()/2
+
+lambdas = np.logspace(np.log10(1/(5*10*K*K)), np.log10(100/(K*K)), 200)
+perf = []
+baseline = []
+for l in lambdas:
+    T = ot.sinkhorn(
+        origin,
+        target,
+        softmax(np.einsum("s,i->si", beta, matrix.flatten() if args.model == "knowledge" else matrix.flatten()/matrix.std()), axis=1).reshape((len(beta), K, K)).mean(axis=0),
+        l
+    )
+    
+    T_baseline = ot.sinkhorn(
+        origin,
+        target,
+        (1-np.identity(K))/K,
+        l
+    )
+
+    perf.append(tv_dist(T.flatten(), theta.flatten()))
+    baseline.append(tv_dist(T_baseline.flatten(), theta.flatten()))
+
+fig, ax = plt.subplots()
+ax.plot(lambdas, perf, label=f"{args.model} ({np.min(perf):.3f})")
+ax.plot(lambdas, baseline, label=f"baseline ({np.min(baseline):.3f})")
+ax.set_xscale("log")
+fig.legend()
+fig.savefig(opj(args.input, f"performance_{args.model}_{args.prior}.eps"), bbox_inches="tight")
+
+# counterfactual
+
+
+T = ot.sinkhorn(
+    origin,
+    target,
+    C.mean(axis=0)/C.mean(axis=0).sum(),
+    1/(3*K*K)
+)
+
+shifts = T[:, order][order]
+sig = shifts>origin[order]*target[order]
+shifts = shifts/shifts.sum(axis=1)[:,np.newaxis]
+
+fig, ax = plt.subplots()
+sns.heatmap(
+    shifts,
+    xticklabels=[topics[i] for i in order],
+    yticklabels=[topics[i] for i in order],
+    cmap="Blues",
+    vmin=0,
+    ax=ax,
+    annot=[[f"\\textbf{{{shifts[i,j]:.2f}}}" if sig[i,j] else "" for j in range(len(topics))] for i in range(len(topics))],
+    fmt="",
+    annot_kws={"fontsize": 6},
+)
+fig.savefig(opj(args.input, f"cost_matrix_counterfactual_couplings_{args.model}_{args.prior}.eps"), bbox_inches="tight")

+ 72 - 0
code/topic_distance.py

@@ -0,0 +1,72 @@
+import pandas as pd
+import pickle
+import numpy as np
+from scipy.stats import entropy
+
+from os.path import join as opj
+
+import argparse
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--input")
+args = parser.parse_args()
+
+with open(opj(args.input, "dataset.pickle"), "rb") as handle:
+    data = pickle.load(handle)
+
+with open(opj(args.input, "etm_instance.pickle"), "rb") as handle:
+    etm_instance = pickle.load(handle)
+
+p_w_z = etm_instance.get_topic_word_dist()
+n_topics = p_w_z.shape[0]
+
+topics = pd.read_csv(opj(args.input, "topics.csv"))
+n_topics = len(topics)
+junk = np.array(topics["label"].str.contains("Junk"))
+
+df = pd.read_csv(opj(args.input, "aggregate.csv"))
+expertise = np.stack(df[[f"expertise_{k+1}" for k in range(n_topics)]].values)
+
+expertise = expertise[:,~junk]
+p_w_z = p_w_z[~junk,:]
+theta = etm_instance.get_document_topic_dist()[:,~junk]
+
+n_topics -= junk.sum()
+
+R = np.array([
+    [((expertise[:,i]>expertise[:,i].mean())&(expertise[:,j]>expertise[:,j].mean())).mean()/(expertise[:,i]>expertise[:,i].mean()).mean() for j in range(n_topics)]
+    for i in range(n_topics)
+])
+
+np.save(opj(args.input, "nu_expertise.npy"), R)
+
+R = np.array([
+    [((expertise[:,i]>expertise[:,i].mean())&(expertise[:,j]>expertise[:,j].mean())).mean()/((expertise[:,i]>expertise[:,i].mean())|(expertise[:,j]>expertise[:,j].mean())).mean() for j in range(n_topics)]
+    for i in range(n_topics)
+])
+
+np.save(opj(args.input, "nu_expertise_symmetric.npy"), R)
+
+
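+# Linguistic gap Delta(k, k') = H(phi_k + phi_k') - H(phi_k): the entropy added when topic k''s
+# word distribution is mixed into topic k's (scipy's entropy() normalizes its inputs); the
+# symmetric variant subtracts the average of the two entropies instead.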
+def dist(p,q):
+    return entropy(p+q)-entropy(p)
+
+def dist_symmetric(p,q):
+    return entropy(p+q)-0.5*(entropy(p)+entropy(q))
+
+distance = np.zeros((n_topics, n_topics))
+distance_symmetric = np.zeros((n_topics, n_topics))
+
+for i in range(n_topics):
+    for j in range(n_topics):
+        distance[i,j] = dist(p_w_z[i], p_w_z[j])
+        distance_symmetric[i,j] = dist_symmetric(p_w_z[i], p_w_z[j])
+
+V = len(pd.read_csv(opj(args.input, "ngrams.csv")))
+distance_symmetric /= np.log(V)
+
+np.save(opj(args.input, "nu_ling.npy"), distance)
+np.save(opj(args.input, "nu_ling_symmetric.npy"), distance_symmetric)
+
+np.save(opj(args.input, "nu_etm.npy"), np.corrcoef(theta.T))
+

+ 1 - 0
inspire-harvest

@@ -0,0 +1 @@
+Subproject commit 7c8b25105648538f2d173bb643bfc6c144c3d615

+ 579 - 0
output/acl_2002_2022/aggregate.csv

@@ -0,0 +1,579 @@
+,start_1,end_1,expertise_1,start_2,end_2,expertise_2,start_3,end_3,expertise_3,start_4,end_4,expertise_4,start_5,end_5,expertise_5,start_6,end_6,expertise_6,start_7,end_7,expertise_7,start_8,end_8,expertise_8,start_9,end_9,expertise_9,start_10,end_10,expertise_10,start_11,end_11,expertise_11,start_12,end_12,expertise_12,start_13,end_13,expertise_13,start_14,end_14,expertise_14,start_15,end_15,expertise_15,start_16,end_16,expertise_16,start_17,end_17,expertise_17,start_18,end_18,expertise_18,start_19,end_19,expertise_19,start_20,end_20,expertise_20,bai
+0,0.0,0.0,0.0,12.0,195.0,0.016154204200093093,120.0,37.0,0.16126824192974287,0.0,0.0,0.0,0.0,0.0,0.0,26.0,90.0,0.033129808613750235,0.0,0.0,0.0,15.0,112.0,0.01998740519672535,0.0,76.0,0.0,122.0,75.0,0.1306847739780412,10.0,56.0,0.009583002491580646,0.0,0.0,0.0,109.0,133.0,0.1421022369465816,6.0,134.0,0.009856802562768666,25.0,40.0,0.022314705801823504,175.0,294.0,0.18867562905566357,141.0,117.0,0.18837444897735672,0.0,0.0,0.0,45.0,280.0,0.05076253319825862,18.0,40.0,0.02710620704761383,"chen, k"
+1,0.0,0.0,0.0,15.0,297.0,0.05242334322453016,0.0,55.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,26.0,206.0,0.05934718100890208,0.0,0.0,0.0,24.0,289.0,0.06478733926805143,1.0,720.0,0.0019782393669634025,39.0,292.0,0.12462908011869436,14.0,161.0,0.030662710187932735,0.0,0.0,0.0,104.0,306.0,0.2823936696340257,30.0,349.0,0.08456973293768547,21.0,530.0,0.05489614243323442,18.0,64.0,0.037586547972304644,49.0,464.0,0.14886251236399606,0.0,0.0,0.0,22.0,37.0,0.05489614243323442,1.0,163.0,0.002967359050445104,"gurevych, i"
+2,0.0,0.0,0.0,0.0,0.0,0.0,9.0,8.0,0.021669741427274593,0.0,0.0,0.0,0.0,0.0,0.0,13.0,94.0,0.0345837359805558,0.0,0.0,0.0,31.0,3.0,0.09312132126855838,3.0,0.0,0.01229904243169639,5.0,1.0,0.026647925268675512,1.0,51.0,0.001757006061670913,0.0,0.0,0.0,45.0,39.0,0.14524583443146213,45.0,130.0,0.2644294122814724,48.0,23.0,0.11561099885794607,9.0,9.0,0.05534569094263376,2.0,8.0,0.005856686872236376,0.0,0.0,0.0,35.0,56.0,0.2070338809335559,4.0,8.0,0.016398723242261853,"gaizauskas, r"
+3,0.0,0.0,0.0,3.0,12.0,0.005705591479650057,60.0,11.0,0.4351464435146443,0.0,0.0,0.0,0.0,0.0,0.0,0.0,13.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,1.0,0.0,7.0,19.0,0.020920502092050212,0.0,5.0,0.0,0.0,0.0,0.0,2.0,6.0,0.003803727653100038,0.0,48.0,0.0,51.0,34.0,0.1692658805629517,1.0,11.0,0.005705591479650057,3.0,6.0,0.051350323316850514,0.0,0.0,0.0,34.0,484.0,0.28623050589577786,6.0,17.0,0.021871434005325215,"forcada, m"
+4,0.0,0.0,0.0,4.0,9.0,0.0036780071888322325,147.0,59.0,0.31421884142773554,0.0,0.0,0.0,0.0,0.0,0.0,24.0,10.0,0.034188748641645075,0.0,0.0,0.0,8.0,0.0,0.008484493856056175,8.0,1.0,0.016578896040569533,87.0,13.0,0.14433391847084065,23.0,69.0,0.06631558416227813,0.0,0.0,0.0,93.0,1.0,0.17139652818412324,12.0,0.0,0.021259996099083283,27.0,14.0,0.03205021037086573,12.0,0.0,0.03169494831285352,33.0,2.0,0.07655549054027695,0.0,0.0,0.0,16.0,3.0,0.037407004931873274,29.0,5.0,0.04183733177296665,"carroll, j"
+5,0.0,0.0,0.0,14.0,25.0,0.05143448782605792,13.0,9.0,0.04776059583848235,0.0,0.0,0.0,0.0,0.0,0.0,38.0,45.0,0.08730503323202299,0.0,0.0,0.0,0.0,1.0,0.0,0.0,23.0,0.0,14.0,3.0,0.049230152633512575,0.0,0.0,0.0,0.0,0.0,0.0,7.0,23.0,0.02204335192545339,4.0,8.0,0.01285862195651448,48.0,10.0,0.17450986940983934,44.0,43.0,0.13309508700444206,0.0,1.0,0.0,0.0,0.0,0.0,111.0,70.0,0.30970909455262013,31.0,9.0,0.11205370562105474,"kirchhoff, k"
+6,0.0,0.0,0.0,0.0,294.0,0.0,40.0,26.0,0.10832083958020988,0.0,0.0,0.0,0.0,0.0,0.0,16.0,154.0,0.05997001499250374,0.0,0.0,0.0,7.0,107.0,0.01967766116941529,0.0,129.0,0.0,14.0,17.0,0.0472263868065967,0.0,348.0,0.0,0.0,0.0,0.0,7.0,85.0,0.021739130434782608,25.0,99.0,0.07027736131934033,13.0,26.0,0.06371814092953522,40.0,101.0,0.13155922038980508,19.0,33.0,0.06446776611694152,0.0,0.0,0.0,108.0,95.0,0.38999250374812594,7.0,51.0,0.023050974512743624,"yang, m"
+7,0.0,0.0,0.0,1.0,126.0,0.000259051698083881,14.0,52.0,0.018738072828067394,0.0,0.0,0.0,0.0,0.0,0.0,3.0,28.0,0.009066809432935835,0.0,0.0,0.0,4.0,13.0,0.009066809432935835,3.0,101.0,0.005440085659761501,42.0,72.0,0.06219831270993982,4.0,26.0,0.0060445396219572225,0.0,0.0,0.0,10.0,144.0,0.010577944338425141,62.0,158.0,0.07283670244458454,26.0,44.0,0.05582564093707636,15.0,79.0,0.017226937922578088,0.0,81.0,0.0,0.0,0.0,0.0,448.0,1261.0,0.6976607631663029,33.0,53.0,0.03505832980735189,"koehn, p"
+8,0.0,0.0,0.0,0.0,17.0,0.0,61.0,15.0,0.3799472295514512,0.0,0.0,0.0,0.0,0.0,0.0,3.0,9.0,0.021108179419525065,0.0,0.0,0.0,0.0,12.0,0.0,7.0,19.0,0.03693931398416886,49.0,7.0,0.3007915567282322,0.0,8.0,0.0,0.0,0.0,0.0,0.0,21.0,0.0,0.0,39.0,0.0,11.0,8.0,0.079155672823219,0.0,206.0,0.0,14.0,40.0,0.1108179419525066,0.0,0.0,0.0,10.0,12.0,0.06596306068601582,1.0,35.0,0.005277044854881266,"briscoe, t"
+9,0.0,0.0,0.0,3.0,26.0,0.004807658954524958,5.0,4.0,0.009259195023529548,0.0,0.0,0.0,0.0,0.0,0.0,0.0,97.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,13.0,10.0,0.07417530958409935,3.0,11.0,0.01923063581809983,0.0,0.0,0.0,5.0,31.0,0.03205105969683305,26.0,43.0,0.06980008556199198,1.0,57.0,0.0016025529848416526,23.0,34.0,0.04807658954524958,0.0,16.0,0.0,0.0,0.0,0.0,235.0,201.0,0.6483540878976032,18.0,21.0,0.09264282493322697,"banchs, r"
+10,0.0,0.0,0.0,0.0,43.0,0.0,25.0,77.0,0.09123667867821818,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.012880472283983745,0.0,0.0,0.0,2.0,0.0,0.008586981522655829,12.0,1.0,0.03864141685195123,1.0,11.0,0.004293490761327914,0.0,1.0,0.0,0.0,0.0,0.0,9.0,14.0,0.0354212987809553,8.0,8.0,0.012880472283983745,6.0,0.0,0.01771064939047765,23.0,7.0,0.047765084719773046,0.0,0.0,0.0,0.0,0.0,0.0,228.0,292.0,0.7169362876638808,4.0,18.0,0.0136471670627923,"de gispert, a"
+11,0.0,0.0,0.0,4.0,17.0,0.008665511265164644,4.0,0.0,0.006547275178124398,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,7.0,0.0,12.0,8.0,0.07328546669967814,0.0,54.0,0.0,0.0,0.0,0.0,2.0,2.0,0.01386481802426343,10.0,1.0,0.02002695936838051,1.0,22.0,0.0017331022530329288,22.0,1.0,0.0415944540727903,0.0,0.0,0.0,0.0,0.0,0.0,229.0,139.0,0.7329371956754973,18.0,0.0,0.10134521746306842,"lambert, p"
+12,0.0,0.0,0.0,3.0,0.0,0.005483748953498125,11.0,14.0,0.05808711854446163,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0077991096227528894,0.0,2.0,0.0,3.0,2.0,0.00710276054929281,0.0,0.0,0.0,0.0,0.0,0.0,10.0,0.0,0.05849332217064667,15.0,9.0,0.038183140861394356,2.0,0.0,0.004752582426365042,23.0,7.0,0.048744435142205556,0.0,1.0,0.0,0.0,0.0,0.0,249.0,107.0,0.7692647345090027,1.0,0.0,0.002089047220380238,"mari{\~n}o, j"
+13,0.0,0.0,0.0,0.0,9.0,0.0,76.0,55.0,0.6135593220338984,0.0,0.0,0.0,0.0,0.0,0.0,5.0,26.0,0.04067796610169492,0.0,0.0,0.0,0.0,6.0,0.0,0.0,1.0,0.0,6.0,9.0,0.061016949152542375,0.0,6.0,0.0,0.0,0.0,0.0,0.0,26.0,0.0,1.0,23.0,0.006779661016949152,2.0,6.0,0.02033898305084746,15.0,0.0,0.15254237288135594,0.0,22.0,0.0,0.0,0.0,0.0,0.0,49.0,0.0,11.0,4.0,0.10508474576271186,"menzel, w"
+14,0.0,0.0,0.0,0.0,48.0,0.0,7.0,23.0,0.060034305317324184,0.0,0.0,0.0,0.0,0.0,0.0,0.0,14.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,23.0,0.0,10.0,80.0,0.08404802744425387,0.0,109.0,0.0,0.0,0.0,0.0,18.0,6.0,0.10806174957118353,1.0,1.0,0.006003430531732418,4.0,152.0,0.0330188679245283,0.0,44.0,0.0,0.0,8.0,0.0,0.0,0.0,0.0,108.0,142.0,0.6668096054888508,7.0,38.0,0.04202401372212692,"sadat, f"
+15,0.0,0.0,0.0,2.0,51.0,0.01382852627419992,0.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,5.0,0.020742789411299884,0.0,0.0,0.0,1.0,2.0,0.002765705254839984,0.0,3.0,0.0,6.0,12.0,0.03318846305807981,0.0,1.0,0.0,0.0,0.0,0.0,14.0,31.0,0.07605689450809956,0.0,92.0,0.0,6.0,3.0,0.016594231529039907,5.0,9.0,0.03318846305807981,0.0,8.0,0.0,0.0,0.0,0.0,158.0,394.0,0.7649150533386012,6.0,44.0,0.03871987356775978,"foster, g"
+16,0.0,0.0,0.0,0.0,11.0,0.0,0.0,17.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.024765611179904477,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,14.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,14.0,32.0,0.11350905124122884,0.0,50.0,0.0,1.0,30.0,0.004953122235980895,2.0,28.0,0.024765611179904477,0.0,8.0,0.0,0.0,0.0,0.0,131.0,326.0,0.7866029836664898,5.0,18.0,0.04540362049649154,"kuhn, r"
+17,0.0,0.0,0.0,4.0,40.0,0.013095535046146172,7.0,1.0,0.03863182838613121,0.0,0.0,0.0,0.0,0.0,0.0,7.0,18.0,0.017678972312297335,0.0,0.0,0.0,1.0,67.0,0.006547767523073086,2.0,8.0,0.008730356697430781,8.0,0.0,0.028810177101521582,0.0,22.0,0.0,0.0,0.0,0.0,17.0,2.0,0.10803816413070592,22.0,3.0,0.07137066600149665,9.0,25.0,0.032302319780493895,9.0,2.0,0.03055624844100773,143.0,38.0,0.45878024444998766,0.0,0.0,0.0,46.0,2.0,0.14136941880768272,12.0,0.0,0.04408830132202545,"martin, j"
+18,0.0,0.0,0.0,1.0,283.0,0.0018353113911660345,10.0,39.0,0.018353113911660345,0.0,0.0,0.0,0.0,0.0,0.0,31.0,395.0,0.06411354459806681,0.0,0.0,0.0,36.0,266.0,0.07402422611036338,5.0,188.0,0.014927199314817079,26.0,22.0,0.06080998409396794,27.0,431.0,0.058729964517313105,0.0,0.0,0.0,54.0,101.0,0.12418940413556831,37.0,180.0,0.08895142542518047,5.0,62.0,0.014682491129328276,78.0,70.0,0.1965006729475101,18.0,87.0,0.03854153921448673,0.0,0.0,0.0,12.0,140.0,0.023247277621436437,85.0,151.0,0.22109384558913492,"zhu, x"
+19,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.005876591576885407,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,86.0,34.0,0.6072477962781586,0.0,8.0,0.0,0.0,0.0,0.0,2.0,1.0,0.029382957884427033,3.0,0.0,0.026444662095984332,2.0,12.0,0.029382957884427033,0.0,0.0,0.0,13.0,82.0,0.11802154750244859,0.0,0.0,0.0,25.0,20.0,0.18364348677766895,0.0,0.0,0.0,"takeuchi, k"
+20,0.0,0.0,0.0,0.0,7.0,0.0,26.0,25.0,0.1219430485762144,0.0,0.0,0.0,0.0,0.0,0.0,0.0,16.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,5.0,0.0,5.0,1.0,0.023450586264656615,0.0,2.0,0.0,0.0,0.0,0.0,23.0,23.0,0.0726968174204355,5.0,0.0,0.018760469011725293,0.0,2.0,0.0,2.0,0.0,0.009380234505862646,1.0,5.0,0.0031267448352875486,0.0,0.0,0.0,225.0,35.0,0.6529313232830821,25.0,1.0,0.0977107761027359,"zhao, b"
+21,0.0,0.0,0.0,10.0,48.0,0.009585503211856217,64.0,64.0,0.05480407271126489,0.0,0.0,0.0,0.0,0.0,0.0,31.0,8.0,0.026798995804425418,0.0,0.0,0.0,0.0,2.0,0.0,12.0,20.0,0.008573369332343449,25.0,3.0,0.02837763599083125,1.0,13.0,0.0006251415138167098,0.0,0.0,0.0,44.0,7.0,0.035424685782946885,14.0,16.0,0.015420157340812176,18.0,51.0,0.017433600138559498,56.0,75.0,0.04155702634705367,6.0,27.0,0.005001132110533678,0.0,0.0,0.0,743.0,572.0,0.7049286950779802,47.0,77.0,0.05146998463757577,"vogel, s"
+22,0.0,0.0,0.0,4.0,704.0,0.006909819009854345,50.0,202.0,0.09587373876172904,0.0,0.0,0.0,0.0,0.0,0.0,32.0,643.0,0.04715166267519925,0.0,0.0,0.0,59.0,434.0,0.11487574103882847,1.0,866.0,0.0025911821286953795,85.0,66.0,0.14337874445447765,0.0,427.0,0.0,0.0,0.0,0.0,73.0,264.0,0.14251501707824588,12.0,211.0,0.01727454752463586,13.0,216.0,0.024616230222606103,65.0,307.0,0.12453378351851127,38.0,340.0,0.05700600683129835,0.0,0.0,0.0,27.0,483.0,0.041027050371010176,56.0,447.0,0.18224647638490835,"li, x"
+23,0.0,0.0,0.0,1.0,75.0,0.002005548684694321,28.0,6.0,0.1332018584751145,0.0,0.0,0.0,0.0,0.0,0.0,6.0,7.0,0.025269913427148447,0.0,0.0,0.0,4.0,50.0,0.015208744192265268,18.0,27.0,0.12634956713574222,5.0,21.0,0.023398067988100412,0.0,13.0,0.0,0.0,0.0,0.0,19.0,58.0,0.09944178894942675,12.0,32.0,0.053481298258515234,22.0,28.0,0.0774141792292008,32.0,10.0,0.16659424407527496,41.0,76.0,0.2254570979710533,0.0,0.0,0.0,9.0,8.0,0.017080589631313304,6.0,58.0,0.03509710198215062,"grishman, r"
+24,0.0,0.0,0.0,0.0,104.0,0.0,35.0,23.0,0.06677157317228767,0.0,0.0,0.0,0.0,0.0,0.0,9.0,19.0,0.00944158550589036,0.0,0.0,0.0,0.0,123.0,0.0,15.0,10.0,0.024808970923886075,73.0,23.0,0.14616028012872312,1.0,61.0,0.0019847176739108863,0.0,0.0,0.0,18.0,64.0,0.03870199464126228,41.0,83.0,0.09824352485858888,34.0,21.0,0.05191454372758332,3.0,13.0,0.007442691277165824,171.0,236.0,0.33571499454202636,0.0,0.0,0.0,57.0,166.0,0.1627468492606927,28.0,47.0,0.056068274287982535,"m{\`a}rquez, l"
+25,0.0,0.0,0.0,0.0,284.0,0.0,0.0,54.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,29.0,49.0,0.10705462530881142,0.0,0.0,0.0,1.0,14.0,0.0036599871900448344,0.0,46.0,0.0,33.0,33.0,0.14090950681672615,0.0,9.0,0.0,0.0,0.0,0.0,42.0,24.0,0.14493549272577544,2.0,19.0,0.002744990392533626,4.0,34.0,0.011894958367645711,11.0,99.0,0.03934486229298197,0.0,18.0,0.0,0.0,0.0,0.0,136.0,1176.0,0.5220056729801446,5.0,15.0,0.02744990392533626,"utiyama, m"
+26,0.0,0.0,0.0,29.0,440.0,0.029037262854187645,56.0,155.0,0.04031682952720542,0.0,0.0,0.0,0.0,0.0,0.0,85.0,45.0,0.046451091936815556,0.0,0.0,0.0,18.0,17.0,0.014258236025295551,0.0,83.0,0.0,60.0,53.0,0.052314978633060234,3.0,6.0,0.0015052124445412232,0.0,0.0,0.0,57.0,54.0,0.054492501156808835,49.0,37.0,0.03345763357739732,8.0,39.0,0.005430196793598083,174.0,125.0,0.12265984376275893,16.0,22.0,0.015433190887068238,0.0,0.0,0.0,741.0,1896.0,0.5694003647350226,18.0,38.0,0.015242657666240235,"sumita, e"
+27,0.0,0.0,0.0,0.0,27.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,10.0,0.015771997786386275,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,69.0,38.0,0.3785279468732706,0.0,2.0,0.0,0.0,0.0,0.0,37.0,25.0,0.20475926950747092,6.0,4.0,0.03984504703929164,9.0,29.0,0.04842280022136137,3.0,12.0,0.016602102933038185,13.0,10.0,0.07055893746541228,0.0,0.0,0.0,42.0,57.0,0.20890979524073047,3.0,1.0,0.016602102933038185,"kageura, k"
+28,0.0,0.0,0.0,7.0,39.0,0.022920692045605387,6.0,2.0,0.017964866738447466,0.0,0.0,0.0,0.0,0.0,0.0,13.0,24.0,0.03531025531350019,0.0,0.0,0.0,1.0,30.0,0.0012389563267894803,0.0,42.0,0.0,10.0,14.0,0.022920692045605387,1.0,15.0,0.0024779126535789606,0.0,0.0,0.0,24.0,18.0,0.06070886001268454,10.0,6.0,0.02177023259930087,22.0,25.0,0.042655496393752115,10.0,5.0,0.03840764613047389,0.0,40.0,0.0,0.0,0.0,0.0,244.0,191.0,0.7001725689169455,10.0,12.0,0.03345182082331597,"langlais, p"
+29,0.0,0.0,0.0,7.0,72.0,0.016152551304943998,5.0,15.0,0.009192508872732357,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,0.0,0.0,1.0,15.0,0.003151717327793951,0.0,33.0,0.0,2.0,11.0,0.006303434655587902,0.0,1.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,4.0,24.0,0.015233300417670764,0.0,5.0,0.0,12.0,29.0,0.02127409196260917,1.0,7.0,0.003151717327793951,0.0,0.0,0.0,332.0,247.0,0.9241899421332418,1.0,4.0,0.001350735997625979,"fonollosa, j"
+30,0.0,0.0,0.0,31.0,81.0,0.07943006878480184,8.0,0.0,0.013647778141718529,0.0,0.0,0.0,0.0,0.0,0.0,0.0,22.0,0.0,0.0,0.0,0.0,1.0,7.0,0.0021836445026749644,12.0,48.0,0.05895840157222404,4.0,29.0,0.006987662408559886,0.0,0.0,0.0,0.0,0.0,0.0,1.0,11.0,0.0013101867016049788,0.0,10.0,0.0,0.0,28.0,0.0,7.0,78.0,0.017742111584234086,0.0,1.0,0.0,0.0,0.0,0.0,280.0,437.0,0.8197401463041817,0.0,8.0,0.0,"schwenk, h"
+31,0.0,0.0,0.0,0.0,94.0,0.0,17.0,5.0,0.07982261640798227,0.0,0.0,0.0,0.0,0.0,0.0,6.0,17.0,0.026607538802660757,0.0,0.0,0.0,0.0,5.0,0.0,0.0,17.0,0.0,19.0,11.0,0.0975609756097561,1.0,2.0,0.004434589800443459,0.0,0.0,0.0,0.0,10.0,0.0,0.0,37.0,0.0,27.0,19.0,0.1419068736141907,23.0,11.0,0.10199556541019957,3.0,0.0,0.011086474501108648,0.0,0.0,0.0,116.0,219.0,0.5365853658536587,0.0,1.0,0.0,"senellart, j"
+32,0.0,0.0,0.0,3.0,6.0,0.03383458646616542,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,9.0,0.048872180451127824,0.0,0.0,0.0,3.0,2.0,0.02631578947368421,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,38.0,3.0,0.2894736842105263,36.0,20.0,0.38533834586466165,4.0,7.0,0.04511278195488722,1.0,23.0,0.011278195488721806,0.0,0.0,0.0,22.0,51.0,0.15977443609022557,0.0,0.0,0.0,"estrella, p"
+33,0.0,0.0,0.0,7.0,7.0,0.024281564174602078,18.0,7.0,0.10715328932344083,0.0,0.0,0.0,0.0,0.0,0.0,27.0,19.0,0.11864082723019154,0.0,0.0,0.0,7.0,11.0,0.03238072982776659,0.0,1.0,0.0,28.0,14.0,0.16076179955494418,0.0,23.0,0.0,0.0,0.0,0.0,6.0,0.0,0.03935397868193381,0.0,18.0,0.0,19.0,81.0,0.09638272673680631,40.0,40.0,0.1648299706305665,25.0,5.0,0.18952047628404967,0.0,0.0,0.0,9.0,115.0,0.04183949312500332,4.0,1.0,0.024855144430695038,"bouillon, p"
+34,0.0,0.0,0.0,9.0,1991.0,0.006701437373050057,244.0,1350.0,0.2013571719225139,0.0,0.0,0.0,0.0,0.0,0.0,72.0,1161.0,0.05946702551746612,0.0,0.0,0.0,24.0,977.0,0.017551383596083485,9.0,1449.0,0.004467624915366705,132.0,220.0,0.09013255980049337,37.0,1109.0,0.028259500456724315,0.0,0.0,0.0,146.0,542.0,0.09172814012741005,38.0,821.0,0.026202974702031707,57.0,347.0,0.04344030756710303,201.0,644.0,0.13473789427296412,77.0,825.0,0.05758272113139309,0.0,0.0,0.0,312.0,702.0,0.1879863776274312,73.0,472.0,0.05038488098996896,"zhang, y"
+35,0.0,0.0,0.0,0.0,276.0,0.0,6.0,20.0,0.039500909147908964,0.0,0.0,0.0,0.0,0.0,0.0,6.0,169.0,0.02351244592137438,0.0,0.0,0.0,0.0,227.0,0.0,0.0,185.0,0.0,77.0,97.0,0.2762869145400965,43.0,183.0,0.17555959621292871,0.0,0.0,0.0,9.0,105.0,0.03401467176625494,16.0,200.0,0.061884757665057376,11.0,122.0,0.033074173929399965,4.0,157.0,0.015800363659163586,61.0,276.0,0.23722490438271993,0.0,0.0,0.0,2.0,52.0,0.010972474763308045,14.0,76.0,0.09216878801178757,"inui, k"
+36,0.0,0.0,0.0,20.0,24.0,0.11881956400494677,19.0,5.0,0.05635442178520332,0.0,0.0,0.0,0.0,0.0,0.0,20.0,48.0,0.05686364848808166,0.0,0.0,0.0,0.0,24.0,0.0,2.0,2.0,0.002909866873590533,11.0,4.0,0.029025922064065568,24.0,31.0,0.20369068115133732,0.0,0.0,0.0,14.0,75.0,0.10014791823274086,26.0,47.0,0.09556487790683575,2.0,21.0,0.006789689371711243,51.0,38.0,0.15567787773709352,6.0,14.0,0.02291520162952545,0.0,0.0,0.0,38.0,2.0,0.1155944615533839,9.0,20.0,0.03564586920148403,"gamon, m"
+37,0.0,0.0,0.0,0.0,30.0,0.0,15.0,29.0,0.08835341365461846,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0077108433734939755,0.0,4.0,0.0,3.0,8.0,0.011566265060240964,1.0,5.0,0.0048192771084337345,0.0,0.0,0.0,0.0,2.0,0.0,44.0,55.0,0.2014457831325301,0.0,12.0,0.0,11.0,13.0,0.04273092369477911,0.0,0.0,0.0,0.0,0.0,0.0,158.0,244.0,0.6321285140562249,2.0,2.0,0.011244979919678712,"vilar, d"
+38,0.0,0.0,0.0,15.0,43.0,0.045889101338432124,15.0,1.0,0.03839388145315488,0.0,0.0,0.0,0.0,0.0,0.0,5.0,34.0,0.012084130019120459,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.011472275334608031,9.0,2.0,0.023862332695984704,0.0,3.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,11.0,74.0,0.02523900573613767,2.0,0.0,0.006118546845124283,81.0,50.0,0.18294455066921608,1.0,4.0,0.0030592734225621415,0.0,0.0,0.0,286.0,355.0,0.6455831739961758,2.0,2.0,0.005353728489483747,"matusov, e"
+39,0.0,0.0,0.0,44.0,369.0,0.02794980510075162,89.0,44.0,0.05050167968141673,0.0,0.0,0.0,0.0,0.0,0.0,49.0,50.0,0.027717963399455064,0.0,0.0,0.0,10.0,5.0,0.006800689904699012,8.0,77.0,0.004250431190436882,36.0,57.0,0.02274624691609556,2.0,2.0,0.0007728056709885241,0.0,0.0,0.0,32.0,43.0,0.018289734213395067,125.0,45.0,0.05535396256107801,22.0,26.0,0.013330897824552044,197.0,180.0,0.09797536623377844,2.0,8.0,0.00128800945164754,0.0,0.0,0.0,1153.0,1829.0,0.6218884326212987,76.0,109.0,0.05113397523040734,"ney, h"
+40,0.0,0.0,0.0,0.0,112.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,32.0,0.03053435114503817,0.0,0.0,0.0,1.0,80.0,0.009541984732824428,0.0,225.0,0.0,10.0,42.0,0.1001908396946565,10.0,255.0,0.08110687022900763,0.0,0.0,0.0,15.0,34.0,0.20753816793893132,5.0,15.0,0.052480916030534355,22.0,43.0,0.17032442748091603,2.0,9.0,0.028625954198473285,17.0,28.0,0.1717557251908397,0.0,0.0,0.0,0.0,18.0,0.0,14.0,36.0,0.14790076335877864,"collier, n"
+41,0.0,0.0,0.0,63.0,121.0,0.18854868815883186,146.0,126.0,0.3815866307976359,0.0,0.0,0.0,0.0,0.0,0.0,52.0,53.0,0.07365415188531811,0.0,0.0,0.0,3.0,16.0,0.005771898617107097,0.0,171.0,0.0,0.0,52.0,0.0,1.0,46.0,0.0012826441371349105,0.0,0.0,0.0,4.0,66.0,0.005002312134826151,14.0,50.0,0.020596564538940067,8.0,22.0,0.008667974063585396,3.0,14.0,0.004489254479972187,21.0,100.0,0.022446272399860936,0.0,0.0,0.0,18.0,42.0,0.05483303686251743,139.0,59.0,0.23312057192427,"henderson, j"
+42,0.0,0.0,0.0,2.0,162.0,0.001308435671694798,230.0,124.0,0.15538608198284085,0.0,0.0,0.0,0.0,0.0,0.0,18.0,89.0,0.016149834576347225,0.0,0.0,0.0,5.0,12.0,0.0048599039234378215,13.0,23.0,0.007289855885156733,87.0,89.0,0.055963662872203224,1.0,194.0,0.0011215162900241127,0.0,0.0,0.0,27.0,121.0,0.02018729322043403,87.0,119.0,0.07645002710331035,43.0,189.0,0.03472962111441336,25.0,166.0,0.018467634909063727,10.0,34.0,0.004785136170769548,0.0,0.0,0.0,741.0,2141.0,0.5932821174227556,13.0,39.0,0.010018878857548742,"way, a"
+43,0.0,0.0,0.0,19.0,178.0,0.012433826794597373,249.0,433.0,0.20180520005388655,0.0,0.0,0.0,0.0,0.0,0.0,21.0,79.0,0.02078125545726246,0.0,0.0,0.0,11.0,6.0,0.008382355142425194,5.0,109.0,0.0034926479760104974,188.0,309.0,0.12597981249469864,46.0,43.0,0.030735302188892377,0.0,0.0,0.0,111.0,70.0,0.09220590656667714,31.0,22.0,0.020955887856062987,53.0,125.0,0.054240823067443024,162.0,444.0,0.1126229287350128,137.0,370.0,0.0856048018920173,0.0,0.0,0.0,122.0,218.0,0.08364891902545141,201.0,127.0,0.14711033274956217,"matsumoto, y"
+44,0.0,0.0,0.0,7.0,365.0,0.06009389671361503,0.0,46.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,21.0,262.0,0.12331768388106416,0.0,0.0,0.0,4.0,344.0,0.025039123630672924,0.0,254.0,0.0,5.0,39.0,0.03442879499217527,1.0,194.0,0.006259780907668231,0.0,0.0,0.0,10.0,157.0,0.06103286384976526,14.0,151.0,0.07042253521126761,16.0,79.0,0.08826291079812207,18.0,141.0,0.14553990610328638,31.0,201.0,0.2331768388106416,0.0,0.0,0.0,30.0,169.0,0.11956181533646322,6.0,117.0,0.03286384976525822,"chen, z"
+45,0.0,0.0,0.0,0.0,125.0,0.0,27.0,49.0,0.3230769230769231,0.0,0.0,0.0,0.0,0.0,0.0,11.0,23.0,0.06769230769230769,0.0,0.0,0.0,8.0,33.0,0.07384615384615384,0.0,97.0,0.0,12.0,1.0,0.15384615384615383,8.0,58.0,0.08000000000000002,0.0,0.0,0.0,14.0,30.0,0.08615384615384615,0.0,47.0,0.0,0.0,11.0,0.0,9.0,94.0,0.10153846153846155,5.0,52.0,0.04,0.0,0.0,0.0,0.0,10.0,0.0,6.0,30.0,0.07384615384615385,"zhou, q"
+46,0.0,0.0,0.0,2.0,148.0,0.020689655172413793,23.0,42.0,0.08965517241379309,0.0,0.0,0.0,0.0,0.0,0.0,8.0,29.0,0.08275862068965517,0.0,0.0,0.0,4.0,34.0,0.020689655172413793,0.0,64.0,0.0,1.0,13.0,0.005172413793103448,1.0,140.0,0.010344827586206896,0.0,0.0,0.0,17.0,46.0,0.17586206896551723,0.0,93.0,0.0,26.0,80.0,0.2482758620689655,20.0,32.0,0.1672413793103448,0.0,25.0,0.0,0.0,0.0,0.0,38.0,165.0,0.17931034482758618,0.0,4.0,0.0,"singh, a"
+47,0.0,0.0,0.0,0.0,13.0,0.0,48.0,176.0,0.35342465753424657,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,1.0,0.0,9.0,10.0,0.09315068493150684,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,7.0,0.0,12.0,17.0,0.08493150684931505,0.0,8.0,0.0,28.0,0.0,0.2273972602739726,0.0,0.0,0.0,15.0,2.0,0.24109589041095889,0.0,1.0,0.0,"husain, s"
+48,0.0,0.0,0.0,4.0,213.0,0.005706134094151212,50.0,134.0,0.06405135520684734,0.0,0.0,0.0,0.0,0.0,0.0,50.0,198.0,0.04778887303851639,0.0,0.0,0.0,8.0,63.0,0.008559201141226817,0.0,358.0,0.0,343.0,264.0,0.4739895387541606,9.0,338.0,0.008559201141226817,0.0,0.0,0.0,58.0,422.0,0.06861626248216832,5.0,378.0,0.008559201141226817,21.0,196.0,0.028245363766048498,13.0,58.0,0.016880646695197333,167.0,129.0,0.2204469805040418,0.0,0.0,0.0,22.0,75.0,0.029101283880171178,13.0,67.0,0.01949595815501664,"baldwin, t"
+49,0.0,0.0,0.0,0.0,0.0,0.0,1.0,15.0,0.004898359049718345,0.0,0.0,0.0,0.0,0.0,0.0,3.0,11.0,0.014695077149155033,0.0,0.0,0.0,1.0,3.0,0.002939015429831007,0.0,0.0,0.0,32.0,14.0,0.16164584864070536,0.0,6.0,0.0,0.0,0.0,0.0,8.0,11.0,0.030124908155767825,28.0,11.0,0.15429831006612785,6.0,30.0,0.023512123438648055,3.0,9.0,0.02204261572373255,4.0,0.0,0.011756061719324027,0.0,0.0,0.0,109.0,67.0,0.57408768062699,0.0,0.0,0.0,"babych, b"
+50,0.0,0.0,0.0,0.0,89.0,0.0,15.0,22.0,0.043868200428933524,0.0,0.0,0.0,0.0,0.0,0.0,0.0,46.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,13.0,12.0,0.03801910703840905,90.0,28.0,0.46597777344511604,0.0,136.0,0.0,0.0,0.0,0.0,33.0,22.0,0.10333398323259897,1.0,73.0,0.005849093390524469,4.0,35.0,0.023396373562097877,8.0,3.0,0.033144862546305325,0.0,12.0,0.0,0.0,0.0,0.0,94.0,580.0,0.2864106063560149,0.0,12.0,0.0,"naskar, s"
+51,0.0,0.0,0.0,0.0,36.0,0.0,48.0,37.0,0.34790089470061936,0.0,0.0,0.0,0.0,0.0,0.0,2.0,69.0,0.004129387474191328,0.0,0.0,0.0,0.0,8.0,0.0,0.0,44.0,0.0,11.0,24.0,0.14693737095664142,0.0,28.0,0.0,0.0,0.0,0.0,18.0,81.0,0.24569855471438404,0.0,10.0,0.0,2.0,26.0,0.004817618719889883,3.0,15.0,0.03131452167928424,27.0,36.0,0.1059876118375774,0.0,0.0,0.0,2.0,53.0,0.021679284239504475,13.0,6.0,0.09153475567790777,"yuret, d"
+52,0.0,0.0,0.0,0.0,119.0,0.0,56.0,66.0,0.17924675730582906,0.0,0.0,0.0,0.0,0.0,0.0,13.0,111.0,0.022190967338646665,0.0,0.0,0.0,1.0,110.0,0.001562744178777934,5.0,145.0,0.02344116268166901,72.0,190.0,0.2212845757149555,1.0,333.0,0.0023441162681669013,0.0,0.0,0.0,29.0,196.0,0.09142053445850915,6.0,87.0,0.005860290670417253,129.0,614.0,0.2554305360212533,23.0,119.0,0.031020471948741996,42.0,183.0,0.0617283950617284,0.0,0.0,0.0,33.0,177.0,0.05055477418346617,17.0,62.0,0.05391467416783873,"diab, m"
+53,0.0,0.0,0.0,4.0,35.0,0.016840332596568783,20.0,15.0,0.06483528049678981,0.0,0.0,0.0,0.0,0.0,0.0,28.0,15.0,0.10367329754762655,0.0,0.0,0.0,7.0,83.0,0.029470582043995365,0.0,86.0,0.0,7.0,6.0,0.03473318598042312,6.0,2.0,0.025260498894853173,0.0,0.0,0.0,13.0,5.0,0.015156299336911904,22.0,16.0,0.07146616145668877,17.0,18.0,0.058941164087990744,23.0,10.0,0.08841174613198612,13.0,50.0,0.0333649089569519,0.0,0.0,0.0,53.0,38.0,0.2662877591832439,41.0,5.0,0.1915587832859699,"roukos, s"
+54,0.0,0.0,0.0,0.0,106.0,0.0,12.0,63.0,0.07598371777476257,0.0,0.0,0.0,0.0,0.0,0.0,15.0,14.0,0.06687342508238031,0.0,0.0,0.0,2.0,91.0,0.009304128707113784,0.0,159.0,0.0,19.0,18.0,0.09885636751308395,1.0,5.0,0.007753440589261485,0.0,0.0,0.0,24.0,14.0,0.14498933901918978,2.0,32.0,0.008722620662919171,14.0,21.0,0.1163016088389223,14.0,10.0,0.07016863733281645,10.0,58.0,0.0610583446404342,0.0,0.0,0.0,22.0,33.0,0.15506881178522972,26.0,14.0,0.18491955805388643,"florian, r"
+55,0.0,0.0,0.0,0.0,21.0,0.0,91.0,0.0,0.33651856959905435,0.0,0.0,0.0,0.0,0.0,0.0,17.0,12.0,0.043443995665451686,0.0,0.0,0.0,0.0,1.0,0.0,26.0,101.0,0.1024529602994779,4.0,6.0,0.009851246182642106,0.0,15.0,0.0,0.0,0.0,0.0,23.0,10.0,0.05713722785932421,3.0,8.0,0.00935868387351,14.0,30.0,0.04236035858536106,30.0,0.0,0.09693626243719831,3.0,0.0,0.004433060782188947,0.0,0.0,0.0,81.0,201.0,0.2541621515121663,11.0,9.0,0.04334548320362527,"hassan, h"
+56,0.0,0.0,0.0,3.0,144.0,0.004117916203673443,112.0,203.0,0.3478266553369501,0.0,0.0,0.0,0.0,0.0,0.0,10.0,84.0,0.013268841100725537,0.0,0.0,0.0,26.0,57.0,0.05216027191319694,0.0,97.0,0.0,57.0,85.0,0.09443754493757761,2.0,91.0,0.0032028237139682327,0.0,0.0,0.0,48.0,99.0,0.06661873325053926,0.0,43.0,0.0,19.0,35.0,0.025348061964834304,55.0,168.0,0.06861886397803778,3.0,101.0,0.008235832407346886,0.0,0.0,0.0,47.0,81.0,0.18988169161383095,63.0,6.0,0.12628276357931892,"lee, y"
+57,0.0,0.0,0.0,0.0,87.0,0.0,81.0,113.0,0.32342657342657344,0.0,0.0,0.0,0.0,0.0,0.0,4.0,115.0,0.007867132867132868,0.0,0.0,0.0,1.0,34.0,0.005244755244755245,1.0,75.0,0.0034965034965034965,1.0,8.0,0.0034965034965034965,1.0,44.0,0.005244755244755245,0.0,0.0,0.0,38.0,27.0,0.13986013986013984,0.0,13.0,0.0,54.0,121.0,0.2701048951048951,1.0,102.0,0.0008741258741258741,4.0,20.0,0.01748251748251748,0.0,0.0,0.0,47.0,20.0,0.22290209790209792,0.0,17.0,0.0,"xia, f"
+58,0.0,0.0,0.0,0.0,0.0,0.0,45.0,3.0,0.15280898876404492,0.0,0.0,0.0,0.0,0.0,0.0,2.0,121.0,0.0067415730337078645,0.0,0.0,0.0,31.0,21.0,0.08112359550561797,22.0,3.0,0.07191011235955055,20.0,14.0,0.0502247191011236,2.0,0.0,0.00449438202247191,0.0,0.0,0.0,60.0,33.0,0.17011235955056178,3.0,1.0,0.007865168539325841,4.0,56.0,0.014831460674157304,4.0,0.0,0.00898876404494382,38.0,3.0,0.10449438202247191,0.0,0.0,0.0,72.0,6.0,0.22752808988764042,41.0,15.0,0.09887640449438201,"lin, d"
+59,0.0,0.0,0.0,0.0,48.0,0.0,80.0,106.0,0.23772915434654052,0.0,0.0,0.0,0.0,0.0,0.0,3.0,34.0,0.007096392667060911,0.0,0.0,0.0,2.0,4.0,0.007096392667060911,3.0,23.0,0.008870490833826138,15.0,12.0,0.03784742755765819,1.0,85.0,0.0035481963335304554,0.0,0.0,0.0,10.0,21.0,0.03075103489059728,7.0,65.0,0.03311649911295091,6.0,32.0,0.016558249556475457,15.0,21.0,0.03784742755765819,4.0,8.0,0.013010053222945003,0.0,0.0,0.0,119.0,314.0,0.4458900059136606,43.0,61.0,0.12063867534003549,"cherry, c"
+60,0.0,0.0,0.0,4.0,220.0,0.01001115277546038,21.0,158.0,0.08592906132270162,0.0,0.0,0.0,0.0,0.0,0.0,13.0,40.0,0.04698216434097635,0.0,0.0,0.0,1.0,274.0,0.00166852546257673,7.0,186.0,0.017519517357055668,1.0,12.0,0.00166852546257673,1.0,11.0,0.002502788193865095,0.0,0.0,0.0,12.0,146.0,0.015016729163190572,7.0,71.0,0.013304295135809191,11.0,60.0,0.032869951612761586,26.0,9.0,0.055061340265032094,73.0,191.0,0.21857683559755164,0.0,0.0,0.0,6.0,43.0,0.02002230555092076,213.0,203.0,0.4788668077595215,"mccallum, a"
+61,0.0,0.0,0.0,0.0,766.0,0.0,19.0,83.0,0.05046071967590753,0.0,0.0,0.0,0.0,0.0,0.0,28.0,714.0,0.03120780046071967,0.0,0.0,0.0,2.0,235.0,0.005560409881642703,8.0,718.0,0.03336245928985622,24.0,31.0,0.07228532846135513,0.0,236.0,0.0,0.0,0.0,0.0,65.0,221.0,0.13344983715942488,30.0,330.0,0.10981809516244338,9.0,97.0,0.007774644530939708,70.0,262.0,0.17167765509571845,41.0,191.0,0.10523075701008816,0.0,0.0,0.0,92.0,872.0,0.2032726983874811,27.0,163.0,0.0758995948844229,"zhang, j"
+62,0.0,0.0,0.0,1.0,445.0,0.0023712915634715712,35.0,157.0,0.0664752068293197,0.0,0.0,0.0,0.0,0.0,0.0,39.0,576.0,0.04729409284923856,0.0,0.0,0.0,16.0,474.0,0.05216841439637457,2.0,628.0,0.002766506824050166,78.0,137.0,0.18298466564788957,26.0,155.0,0.04228803288190968,0.0,0.0,0.0,72.0,148.0,0.14056489434578703,22.0,235.0,0.031748959266480484,11.0,95.0,0.013516361911787957,113.0,311.0,0.20543289244875376,34.0,229.0,0.05991463350371503,0.0,0.0,0.0,16.0,442.0,0.035095115139379254,79.0,157.0,0.11737893239184276,"chen, j"
+63,0.0,0.0,0.0,0.0,19.0,0.0,12.0,7.0,0.1091679915209327,0.0,0.0,0.0,0.0,0.0,0.0,1.0,5.0,0.005564387917329093,0.0,0.0,0.0,32.0,17.0,0.27821939586645467,0.0,10.0,0.0,6.0,13.0,0.06677265500794913,0.0,25.0,0.0,0.0,0.0,0.0,35.0,10.0,0.3404875463698993,4.0,17.0,0.03974562798092209,12.0,13.0,0.09326974032856383,0.0,5.0,0.0,3.0,3.0,0.033386327503974564,0.0,0.0,0.0,2.0,42.0,0.033386327503974564,0.0,0.0,0.0,"vilnat, a"
+64,0.0,0.0,0.0,6.0,70.0,0.011623136879529605,1.0,26.0,0.004102283604539861,0.0,0.0,0.0,0.0,0.0,0.0,23.0,308.0,0.04478326268289348,0.0,0.0,0.0,12.0,86.0,0.016682619991795433,2.0,37.0,0.004102283604539861,103.0,51.0,0.14838871416199462,18.0,393.0,0.0382879803090387,0.0,0.0,0.0,118.0,181.0,0.25946943798714617,69.0,121.0,0.16238205934636946,47.0,83.0,0.0823875290578422,7.0,113.0,0.016409134418159443,70.0,89.0,0.12990564747709557,0.0,0.0,0.0,17.0,12.0,0.02712065271890241,20.0,29.0,0.05435525776015315,"mihalcea, r"
+65,0.0,0.0,0.0,0.0,13.0,0.0,4.0,2.0,0.015457277801631603,0.0,0.0,0.0,0.0,0.0,0.0,1.0,20.0,0.0012881064834693002,0.0,0.0,0.0,1.0,14.0,0.005152425933877201,0.0,5.0,0.0,107.0,39.0,0.37355088020609706,0.0,205.0,0.0,0.0,0.0,0.0,167.0,97.0,0.4413911550021468,4.0,21.0,0.010304851867754402,5.0,29.0,0.012881064834693003,3.0,0.0,0.007728638900815802,17.0,30.0,0.03349076857020181,0.0,0.0,0.0,24.0,3.0,0.05753542292829541,9.0,10.0,0.041219407471017606,"pedersen, t"
+66,0.0,0.0,0.0,4.0,154.0,0.01590372942455006,27.0,74.0,0.0912192480565264,0.0,0.0,0.0,0.0,0.0,0.0,12.0,484.0,0.011624868888897303,0.0,0.0,0.0,20.0,172.0,0.019595666612392034,0.0,182.0,0.0,155.0,53.0,0.3015082036737615,92.0,166.0,0.20277255016301324,0.0,0.0,0.0,36.0,97.0,0.053675086807856445,17.0,128.0,0.03626050308797413,13.0,60.0,0.012741916550859747,28.0,99.0,0.04294006944628516,69.0,74.0,0.13557929334428925,0.0,0.0,0.0,6.0,185.0,0.018686882073846316,42.0,57.0,0.05749198186974845,"kim, s"
+67,0.0,0.0,0.0,4.0,6.0,0.010431219148738008,57.0,32.0,0.2008941044984633,0.0,0.0,0.0,0.0,0.0,0.0,31.0,14.0,0.06091086895780944,0.0,0.0,0.0,0.0,2.0,0.0,0.0,5.0,0.0,14.0,30.0,0.04498463257893266,0.0,2.0,0.0,0.0,0.0,0.0,16.0,42.0,0.04563658377572879,8.0,0.0,0.024122194281456642,14.0,83.0,0.04936201918599236,17.0,5.0,0.03688181056160939,131.0,93.0,0.47853217844835616,0.0,0.0,0.0,5.0,14.0,0.018254633510291517,8.0,1.0,0.029989755052621775,"nugues, p"
+68,0.0,0.0,0.0,0.0,94.0,0.0,173.0,116.0,0.6421267893660532,0.0,0.0,0.0,0.0,0.0,0.0,5.0,22.0,0.014314928425357872,0.0,0.0,0.0,0.0,5.0,0.0,0.0,58.0,0.0,2.0,2.0,0.006134969325153374,0.0,8.0,0.0,0.0,0.0,0.0,2.0,1.0,0.006134969325153374,28.0,22.0,0.08128834355828221,24.0,0.0,0.029652351738241305,24.0,12.0,0.06595092024539877,22.0,39.0,0.05828220858895705,0.0,0.0,0.0,19.0,10.0,0.06850715746421268,9.0,30.0,0.027607361963190184,"shieber, s"
+69,0.0,0.0,0.0,5.0,6.0,0.028400126222783213,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,11.0,0.009466708740927737,0.0,0.0,0.0,1.0,0.0,0.0018933417481855476,0.0,28.0,0.0,2.0,1.0,0.00757336699274219,0.0,4.0,0.0,0.0,0.0,0.0,20.0,12.0,0.07573366992742189,0.0,14.0,0.0,11.0,3.0,0.04165351846008205,0.0,13.0,0.0,0.0,27.0,0.0,0.0,0.0,0.0,220.0,120.0,0.8352792679078573,0.0,1.0,0.0,"simard, m"
+70,0.0,0.0,0.0,0.0,413.0,0.0,45.0,63.0,0.0828498491529357,0.0,0.0,0.0,0.0,0.0,0.0,12.0,280.0,0.027345865243289236,0.0,0.0,0.0,11.0,145.0,0.020712462288233924,4.0,280.0,0.00812253423067997,4.0,23.0,0.006498027384543977,1.0,158.0,0.002707511410226657,0.0,0.0,0.0,62.0,176.0,0.14282122688945617,14.0,199.0,0.026804362961243902,17.0,51.0,0.03935561228436605,151.0,221.0,0.3060454861916918,15.0,134.0,0.02423222712152858,0.0,0.0,0.0,170.0,617.0,0.28343776591629916,16.0,88.0,0.029067068925504754,"li, m"
+71,0.0,0.0,0.0,3.0,271.0,0.01151484135107472,19.0,21.0,0.08060388945752305,0.0,0.0,0.0,0.0,0.0,0.0,5.0,118.0,0.0191914022517912,0.0,0.0,0.0,3.0,280.0,0.01023541453428864,2.0,400.0,0.015353121801432959,5.0,37.0,0.0127942681678608,2.0,115.0,0.0076765609007164795,0.0,0.0,0.0,29.0,84.0,0.06320368474923235,0.0,82.0,0.0,8.0,99.0,0.025268679631525078,109.0,392.0,0.35312180143295807,62.0,53.0,0.15960849539406347,0.0,0.0,0.0,8.0,91.0,0.02840327533265098,35.0,32.0,0.21302456499488232,"lin, y"
+72,0.0,0.0,0.0,2.0,452.0,0.01985487035242395,7.0,52.0,0.023932209799796725,0.0,0.0,0.0,0.0,0.0,0.0,6.0,204.0,0.02117852837591888,0.0,0.0,0.0,4.0,306.0,0.009100148911527644,2.0,215.0,0.009927435176211976,12.0,19.0,0.0504644621457442,0.0,219.0,0.0,0.0,0.0,0.0,30.0,218.0,0.13319308861417734,8.0,132.0,0.022868556030916875,10.0,145.0,0.0377242536696055,41.0,50.0,0.14102867137825892,79.0,206.0,0.32920084146831496,0.0,0.0,0.0,24.0,106.0,0.15850804831351786,7.0,72.0,0.04301888576358522,"ji, h"
+73,0.0,0.0,0.0,0.0,70.0,0.0,2.0,13.0,0.009189842805320435,0.0,0.0,0.0,0.0,0.0,0.0,30.0,271.0,0.16154776299879078,0.0,0.0,0.0,1.0,52.0,0.004594921402660218,1.0,30.0,0.002297460701330109,0.0,43.0,0.0,0.0,21.0,0.0,0.0,0.0,0.0,4.0,18.0,0.01837968561064087,5.0,31.0,0.01366384522370012,22.0,72.0,0.10108827085852479,8.0,18.0,0.04365175332527207,0.0,92.0,0.0,0.0,0.0,0.0,118.0,19.0,0.6203143893591294,4.0,3.0,0.025272067714631197,"voss, c"
+74,0.0,0.0,0.0,8.0,218.0,0.020450637951316623,51.0,95.0,0.11627906976744184,0.0,0.0,0.0,0.0,0.0,0.0,26.0,288.0,0.04026784906343317,0.0,0.0,0.0,11.0,219.0,0.013392453171658674,4.0,270.0,0.007239163876572256,109.0,172.0,0.24975115374174287,11.0,199.0,0.019907700660573704,0.0,0.0,0.0,30.0,174.0,0.04162519229029047,12.0,110.0,0.015111754592344583,65.0,227.0,0.0790878653515519,35.0,65.0,0.06424757940457876,72.0,146.0,0.19156637408379332,0.0,0.0,0.0,81.0,173.0,0.11528368473441315,16.0,43.0,0.025789521310288662,"kim, j"
+75,0.0,0.0,0.0,28.0,18.0,0.016443927609319394,589.0,232.0,0.37192114628316975,0.0,0.0,0.0,0.0,0.0,0.0,41.0,22.0,0.01918312130756689,0.0,0.0,0.0,8.0,5.0,0.00414166087175022,21.0,31.0,0.01871417134642692,66.0,40.0,0.04871820343955073,33.0,3.0,0.029758600337760836,0.0,0.0,0.0,74.0,115.0,0.045251479895048706,25.0,16.0,0.014283251620141765,161.0,122.0,0.0665441455760926,77.0,29.0,0.03902364910271318,143.0,74.0,0.08497197987459604,0.0,0.0,0.0,122.0,89.0,0.0778939033583245,246.0,29.0,0.16315075937753828,"tsujii, j"
+76,0.0,0.0,0.0,0.0,23.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,9.0,0.010190217391304348,0.0,0.0,0.0,0.0,6.0,0.0,0.0,10.0,0.0,3.0,3.0,0.01834239130434783,0.0,50.0,0.0,0.0,0.0,0.0,5.0,26.0,0.030570652173913047,0.0,18.0,0.0,0.0,4.0,0.0,1.0,44.0,0.007642663043478262,0.0,28.0,0.0,0.0,0.0,0.0,108.0,110.0,0.9332540760869565,0.0,8.0,0.0,"goutte, c"
+77,0.0,0.0,0.0,3.0,172.0,0.015189873417721522,58.0,31.0,0.43881856540084385,0.0,0.0,0.0,0.0,0.0,0.0,0.0,157.0,0.0,0.0,0.0,0.0,0.0,74.0,0.0,0.0,275.0,0.0,15.0,26.0,0.09620253164556962,0.0,16.0,0.0,0.0,0.0,0.0,1.0,38.0,0.008438818565400843,0.0,78.0,0.0,5.0,24.0,0.04219409282700422,27.0,99.0,0.23037974683544302,0.0,42.0,0.0,0.0,0.0,0.0,1.0,169.0,0.008438818565400843,19.0,36.0,0.160337552742616,"cheng, y"
+78,0.0,0.0,0.0,4.0,5.0,0.006723034970072204,121.0,142.0,0.2983346767969541,0.0,0.0,0.0,0.0,0.0,0.0,2.0,12.0,0.00480216783576586,0.0,0.0,0.0,4.0,14.0,0.01440650350729758,5.0,41.0,0.012005419589414649,49.0,86.0,0.10804877630473184,0.0,4.0,0.0,0.0,0.0,0.0,10.0,40.0,0.021009484281475636,0.0,6.0,0.0,9.0,57.0,0.02088943008558149,135.0,26.0,0.29348105716294787,38.0,50.0,0.07263278851595864,0.0,0.0,0.0,12.0,14.0,0.031214090932478088,63.0,39.0,0.1164525700173221,"asahara, m"
+79,0.0,0.0,0.0,6.0,3.0,0.09137055837563453,21.0,0.0,0.1472081218274112,0.0,0.0,0.0,0.0,0.0,0.0,1.0,32.0,0.015228426395939089,0.0,0.0,0.0,4.0,1.0,0.030456852791878177,0.0,0.0,0.0,22.0,12.0,0.16497461928934012,0.0,1.0,0.0,0.0,0.0,0.0,1.0,2.0,0.015228426395939089,12.0,21.0,0.08121827411167513,12.0,15.0,0.12182741116751271,3.0,28.0,0.03553299492385788,0.0,5.0,0.0,0.0,0.0,0.0,35.0,61.0,0.29695431472081224,0.0,10.0,0.0,"carl, m"
+80,0.0,0.0,0.0,14.0,25.0,0.03771811739605293,10.0,61.0,0.013038361569005954,0.0,0.0,0.0,0.0,0.0,0.0,29.0,138.0,0.07052568303235038,0.0,0.0,0.0,0.0,68.0,0.0,0.0,120.0,0.0,15.0,176.0,0.033247822000965176,0.0,66.0,0.0,0.0,0.0,0.0,25.0,80.0,0.04444895989433847,85.0,231.0,0.18801910035305175,10.0,205.0,0.01765258692946585,9.0,97.0,0.013038361569005952,3.0,122.0,0.0053338751873206166,0.0,0.0,0.0,258.0,485.0,0.5663093816938017,7.0,47.0,0.010667750374641233,"callison-burch, c"
+81,0.0,0.0,0.0,1.0,3.0,0.0005898351738375333,76.0,1.0,0.17219910213979095,0.0,0.0,0.0,0.0,0.0,0.0,5.0,6.0,0.011796703476750665,0.0,0.0,0.0,2.0,0.0,0.0025559524199626443,18.0,2.0,0.031130189730314256,11.0,6.0,0.03244093456106433,2.0,52.0,0.005898351738375333,0.0,0.0,0.0,6.0,22.0,0.01671199659206344,8.0,27.0,0.016384310384375923,15.0,7.0,0.04030540354556477,14.0,1.0,0.028508700068814105,0.0,5.0,0.0,0.0,0.0,0.0,169.0,37.0,0.3507880853294885,122.0,5.0,0.2906904348395976,"osborne, m"
+82,0.0,0.0,0.0,0.0,105.0,0.0,26.0,43.0,0.03615715963973372,0.0,0.0,0.0,0.0,0.0,0.0,36.0,74.0,0.051494582952617156,0.0,0.0,0.0,18.0,152.0,0.028064221381020753,9.0,83.0,0.015663751468476702,67.0,27.0,0.09332985249967367,0.0,100.0,0.0,0.0,0.0,0.0,87.0,72.0,0.13575251272679806,23.0,139.0,0.03752773789322542,7.0,49.0,0.007766610103119698,87.0,354.0,0.14162641952747684,114.0,191.0,0.18013314188748206,0.0,0.0,0.0,90.0,138.0,0.13999477874951052,82.0,101.0,0.13248923117086542,"ng, h"
+83,0.0,0.0,0.0,0.0,4.0,0.0,18.0,1.0,0.09132420091324203,0.0,0.0,0.0,0.0,0.0,0.0,29.0,11.0,0.2004058853373922,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,4.0,25.0,0.03044140030441401,0.0,30.0,0.0,0.0,0.0,0.0,5.0,17.0,0.03805175038051751,0.0,0.0,0.0,6.0,75.0,0.031456113647894475,1.0,11.0,0.007610350076103502,106.0,83.0,0.5578386605783867,0.0,0.0,0.0,0.0,2.0,0.0,9.0,8.0,0.04287163876204973,"morante, r"
+84,0.0,0.0,0.0,1.0,88.0,0.0017743679870217653,40.0,10.0,0.11710828714343649,0.0,0.0,0.0,0.0,0.0,0.0,13.0,36.0,0.0544139516020008,0.0,0.0,0.0,5.0,57.0,0.02010950385291334,8.0,53.0,0.028389887792348245,25.0,63.0,0.04922603758280383,0.0,191.0,0.0,0.0,0.0,0.0,64.0,35.0,0.21718264161146408,18.0,18.0,0.03903609571447884,16.0,72.0,0.05323103961065296,29.0,62.0,0.0731715560362309,67.0,37.0,0.17175882114370689,0.0,0.0,0.0,2.0,11.0,0.007097471948087061,64.0,21.0,0.16750033797485464,"daelemans, w"
+85,0.0,0.0,0.0,16.0,32.0,0.12844543535307698,0.0,20.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,9.0,0.009875106051338713,0.0,0.0,0.0,30.0,13.0,0.04723477300080895,0.0,19.0,0.0,55.0,40.0,0.4309137186038711,0.0,14.0,0.0,0.0,0.0,0.0,49.0,38.0,0.2322968253655072,5.0,12.0,0.022788706272320106,2.0,18.0,0.005524534853895783,13.0,5.0,0.10772842965096778,4.0,19.0,0.015192470848213404,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,"ferret, o"
+86,0.0,0.0,0.0,2.0,595.0,0.0032831737346101226,75.0,93.0,0.15102599179206563,0.0,0.0,0.0,0.0,0.0,0.0,16.0,439.0,0.05143638850889193,0.0,0.0,0.0,46.0,344.0,0.09958960328317372,8.0,530.0,0.017510259917920653,62.0,49.0,0.1266757865937072,13.0,407.0,0.02462380300957592,0.0,0.0,0.0,20.0,123.0,0.03720930232558139,6.0,179.0,0.010943912448700409,21.0,63.0,0.03824897400820793,119.0,266.0,0.23414500683994524,25.0,103.0,0.058331053351573174,0.0,0.0,0.0,19.0,176.0,0.03994528043775649,53.0,139.0,0.10703146374828998,"wu, y"
+87,0.0,0.0,0.0,0.0,4.0,0.0,2.0,8.0,0.01699029126213592,0.0,0.0,0.0,0.0,0.0,0.0,6.0,23.0,0.0412621359223301,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,8.0,23.0,0.05825242718446603,3.0,213.0,0.02184466019417476,0.0,0.0,0.0,8.0,15.0,0.05825242718446603,2.0,24.0,0.00970873786407767,2.0,44.0,0.014563106796116507,3.0,12.0,0.029126213592233014,7.0,1.0,0.06796116504854369,0.0,0.0,0.0,51.0,0.0,0.4563106796116505,16.0,8.0,0.22572815533980586,"aramaki, e"
+88,0.0,0.0,0.0,4.0,174.0,0.004840689452483445,78.0,307.0,0.12283249485676742,0.0,0.0,0.0,0.0,0.0,0.0,47.0,119.0,0.061373026986843694,0.0,0.0,0.0,12.0,132.0,0.015732240720571198,1.0,115.0,0.0012101723631208613,123.0,139.0,0.21474508583579688,4.0,75.0,0.006655947997164738,0.0,0.0,0.0,80.0,99.0,0.12622097747350586,18.0,71.0,0.023027851252528393,29.0,85.0,0.02188683159587158,28.0,171.0,0.026926335079439166,133.0,295.0,0.20542675863976625,0.0,0.0,0.0,102.0,653.0,0.1346316753971958,21.0,74.0,0.03448991234894455,"kurohashi, s"
+89,0.0,0.0,0.0,3.0,16.0,0.02629656683710738,50.0,5.0,0.17816588086858356,0.0,0.0,0.0,0.0,0.0,0.0,42.0,23.0,0.11315492396573477,0.0,0.0,0.0,17.0,17.0,0.05080018593532107,0.0,0.0,0.0,18.0,0.0,0.050468158576266686,1.0,5.0,0.003984328308652633,0.0,0.0,0.0,22.0,6.0,0.07669831994156318,8.0,0.0,0.010624875489740355,1.0,1.0,0.00239059698519158,19.0,54.0,0.04542134271864002,2.0,1.0,0.006374925293844213,0.0,0.0,0.0,66.0,32.0,0.42964340261637557,2.0,9.0,0.005976492462978949,"kashioka, h"
+90,0.0,0.0,0.0,1.0,1.0,0.0017513134851138352,26.0,2.0,0.05814360770577933,0.0,0.0,0.0,0.0,0.0,0.0,21.0,46.0,0.0635726795096322,0.0,0.0,0.0,4.0,0.0,0.010507880910683012,0.0,0.0,0.0,95.0,24.0,0.31436077057793343,1.0,2.0,0.002626970227670753,0.0,0.0,0.0,24.0,9.0,0.07530647985989491,33.0,52.0,0.08476357267950962,11.0,0.0,0.02626970227670753,18.0,34.0,0.0563922942206655,15.0,8.0,0.0563922942206655,0.0,0.0,0.0,75.0,77.0,0.24640980735551657,1.0,2.0,0.0035026269702276703,"tanaka, h"
+91,0.0,0.0,0.0,0.0,30.0,0.0,8.0,16.0,0.031374548633151915,0.0,0.0,0.0,0.0,0.0,0.0,1.0,15.0,0.003277937916896469,0.0,0.0,0.0,0.0,0.0,0.0,0.0,24.0,0.0,60.0,97.0,0.258082978656982,0.0,117.0,0.0,0.0,0.0,0.0,20.0,15.0,0.07848319926740689,6.0,11.0,0.029282912057608457,62.0,190.0,0.2676982632132116,1.0,1.0,0.004370583889195292,28.0,23.0,0.16753904908581954,0.0,0.0,0.0,4.0,8.0,0.021124488797777247,32.0,17.0,0.13876603848195052,"hahn, u"
+92,0.0,0.0,0.0,0.0,7.0,0.0,1.0,0.0,0.002792256142963514,0.0,0.0,0.0,0.0,0.0,0.0,2.0,3.0,0.01465934475055845,0.0,0.0,0.0,0.0,0.0,0.0,0.0,26.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,10.0,0.015357408786299329,4.0,1.0,0.02736411020104244,66.0,20.0,0.48050074460163805,0.0,27.0,0.0,2.0,6.0,0.008144080416976916,0.0,0.0,0.0,0.0,0.0,0.0,57.0,11.0,0.4511820551005212,"tomanek, k"
+93,0.0,0.0,0.0,2.0,477.0,0.014968192590744667,5.0,139.0,0.01621554197330672,0.0,0.0,0.0,0.0,0.0,0.0,30.0,533.0,0.179618311088936,0.0,0.0,0.0,2.0,422.0,0.0039915180241985775,0.0,661.0,0.0,17.0,78.0,0.05114132468504427,1.0,484.0,0.0037420481476861667,0.0,0.0,0.0,30.0,275.0,0.11101409504802295,7.0,175.0,0.021204939503554947,10.0,145.0,0.0336784333291755,56.0,390.0,0.22577023824373205,52.0,172.0,0.10602469751777471,0.0,0.0,0.0,52.0,200.0,0.15654234751153798,30.0,228.0,0.07608831233628538,"yang, y"
+94,0.0,0.0,0.0,12.0,327.0,0.01733626857457347,37.0,77.0,0.05701706108970831,0.0,0.0,0.0,0.0,0.0,0.0,8.0,447.0,0.013098514034122178,0.0,0.0,0.0,0.0,332.0,0.0,11.0,242.0,0.01695101816180517,25.0,9.0,0.05162355531095211,0.0,195.0,0.0,0.0,0.0,0.0,4.0,115.0,0.00616400660429279,8.0,194.0,0.01155751238304898,4.0,96.0,0.007705008255365986,35.0,141.0,0.05145844799119428,3.0,107.0,0.004623004953219593,0.0,0.0,0.0,429.0,485.0,0.7439735828288386,12.0,151.0,0.01849201981287837,"wu, h"
+95,0.0,0.0,0.0,0.0,7.0,0.0,20.0,23.0,0.04753712423035132,0.0,0.0,0.0,0.0,0.0,0.0,7.0,36.0,0.026620789568996737,0.0,0.0,0.0,10.0,22.0,0.036128214415067,0.0,6.0,0.0,49.0,112.0,0.21043100325968853,17.0,39.0,0.0646504889532778,0.0,0.0,0.0,9.0,35.0,0.024085476276711336,30.0,28.0,0.1254980079681275,7.0,1.0,0.017747193045997825,12.0,2.0,0.03151032234697573,10.0,12.0,0.03422672944585295,0.0,0.0,0.0,140.0,50.0,0.3226186164433177,24.0,2.0,0.058946034045635635,"yamamoto, k"
+96,0.0,0.0,0.0,2.0,17.0,0.000969422789514077,55.0,114.0,0.0666478167790928,0.0,0.0,0.0,0.0,0.0,0.0,42.0,92.0,0.03732277739629196,0.0,0.0,0.0,51.0,31.0,0.03413176071414146,0.0,3.0,0.0,187.0,127.0,0.15256291149977785,8.0,17.0,0.005049077028719151,0.0,0.0,0.0,173.0,181.0,0.20806236619945875,21.0,33.0,0.01716686189764511,18.0,17.0,0.013814274750575596,119.0,512.0,0.1107565537019833,41.0,13.0,0.0302944621723149,0.0,0.0,0.0,308.0,211.0,0.30827644706547647,17.0,87.0,0.014945268005008686,"chang, j"
+97,0.0,0.0,0.0,0.0,138.0,0.0,5.0,20.0,0.03567508232711306,0.0,0.0,0.0,0.0,0.0,0.0,6.0,113.0,0.034248079034028545,0.0,0.0,0.0,10.0,235.0,0.036004390779363335,0.0,228.0,0.0,10.0,10.0,0.066410537870472,0.0,237.0,0.0,0.0,0.0,0.0,35.0,75.0,0.25210391511159896,1.0,37.0,0.0057080131723380905,3.0,29.0,0.017124039517014272,18.0,69.0,0.160300036589828,0.0,87.0,0.0,0.0,0.0,0.0,51.0,111.0,0.34485912916209294,5.0,44.0,0.047566776436150746,"sun, y"
+98,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.006928406466512703,0.0,0.0,0.0,0.0,0.0,0.0,16.0,44.0,0.15935334872979218,0.0,0.0,0.0,9.0,5.0,0.046189376443418015,0.0,6.0,0.0,1.0,4.0,0.006928406466512703,0.0,0.0,0.0,0.0,0.0,0.0,0.0,13.0,0.0,29.0,127.0,0.2771362586605081,0.0,11.0,0.0,73.0,233.0,0.4295612009237876,0.0,54.0,0.0,0.0,0.0,0.0,13.0,0.0,0.04503464203233257,8.0,0.0,0.028868360277136264,"zechner, k"
+99,0.0,0.0,0.0,9.0,195.0,0.021874241450227132,1.0,47.0,0.0036970548929961353,0.0,0.0,0.0,0.0,0.0,0.0,48.0,82.0,0.11665461878455988,0.0,0.0,0.0,2.0,12.0,0.007394109785992271,9.0,77.0,0.022182329357976812,9.0,36.0,0.027223767848426086,1.0,17.0,0.0036970548929961353,0.0,0.0,0.0,21.0,50.0,0.0517587685019459,19.0,21.0,0.04832578895844949,8.0,58.0,0.008242351817718655,30.0,531.0,0.07020003040867662,2.0,17.0,0.0049294065239948465,0.0,0.0,0.0,323.0,1019.0,0.5663749389825897,21.0,30.0,0.0474455377934504,"waibel, a"
+100,0.0,0.0,0.0,0.0,20.0,0.0,9.0,6.0,0.027420736932305054,0.0,0.0,0.0,0.0,0.0,0.0,11.0,28.0,0.04113110539845758,0.0,0.0,0.0,3.0,4.0,0.013710368466152527,0.0,4.0,0.0,24.0,1.0,0.11568123393316196,0.0,0.0,0.0,0.0,0.0,0.0,1.0,12.0,0.005141388174807198,3.0,64.0,0.010282776349614395,30.0,8.0,0.11568123393316193,1.0,5.0,0.002570694087403599,168.0,28.0,0.5758354755784061,0.0,0.0,0.0,16.0,23.0,0.04113110539845758,12.0,4.0,0.05141388174807199,"pighin, d"
+101,0.0,0.0,0.0,0.0,180.0,0.0,17.0,51.0,0.016025641025641024,0.0,0.0,0.0,0.0,0.0,0.0,87.0,41.0,0.10970996216897856,0.0,0.0,0.0,38.0,355.0,0.044136191677175286,0.0,19.0,0.0,43.0,24.0,0.06181693989071039,2.0,147.0,0.004203446826397646,0.0,0.0,0.0,24.0,113.0,0.03467843631778058,10.0,52.0,0.015605296343001262,45.0,51.0,0.0419556536359815,7.0,5.0,0.007198402690205968,431.0,372.0,0.5924758301807481,0.0,0.0,0.0,5.0,13.0,0.00546448087431694,41.0,57.0,0.06672971836906262,"moschitti, a"
+102,0.0,0.0,0.0,0.0,45.0,0.0,11.0,6.0,0.037523452157598496,0.0,0.0,0.0,0.0,0.0,0.0,12.0,56.0,0.036386377849792484,0.0,0.0,0.0,13.0,10.0,0.03513559611120587,0.0,65.0,0.0,60.0,41.0,0.14966740576496673,0.0,62.0,0.0,0.0,0.0,0.0,18.0,38.0,0.053442492466882714,7.0,3.0,0.020467337540508272,27.0,32.0,0.056171470805617144,1.0,1.0,0.002558417192563534,171.0,152.0,0.5755585877537097,0.0,0.0,0.0,2.0,0.0,0.0040934675081016544,8.0,52.0,0.02899539484905338,"basili, r"
+103,0.0,0.0,0.0,5.0,189.0,0.00734284652162551,9.0,68.0,0.018182286624977458,0.0,0.0,0.0,0.0,0.0,0.0,126.0,182.0,0.14609872171899904,0.0,0.0,0.0,47.0,61.0,0.06902275730327981,11.0,70.0,0.010909371974986475,70.0,131.0,0.15105284273058195,48.0,92.0,0.07972233366336269,0.0,0.0,0.0,65.0,81.0,0.09958298520756885,15.0,90.0,0.014718818666656851,16.0,51.0,0.022028539564876534,108.0,395.0,0.1704239711733464,32.0,55.0,0.07552642136529096,0.0,0.0,0.0,31.0,45.0,0.050840470678302344,81.0,82.0,0.08454763280614518,"lee, c"
+104,0.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,0.044444444444444446,0.0,0.0,0.0,0.0,0.0,0.0,7.0,6.0,0.07777777777777778,0.0,0.0,0.0,0.0,0.0,0.0,4.0,22.0,0.044444444444444446,30.0,9.0,0.3296296296296296,0.0,46.0,0.0,0.0,0.0,0.0,16.0,1.0,0.10462962962962963,0.0,2.0,0.0,2.0,3.0,0.005555555555555556,12.0,2.0,0.13333333333333333,20.0,0.0,0.1638888888888889,0.0,0.0,0.0,10.0,67.0,0.07407407407407407,2.0,2.0,0.022222222222222223,"garera, n"
+105,0.0,0.0,0.0,6.0,31.0,0.010804970286331712,97.0,182.0,0.1961102106969206,0.0,0.0,0.0,0.0,0.0,0.0,13.0,11.0,0.028555992899590953,0.0,0.0,0.0,6.0,4.0,0.01389210465385506,4.0,34.0,0.00926140310257004,91.0,87.0,0.18407038666357958,2.0,80.0,0.00463070155128502,0.0,0.0,0.0,71.0,18.0,0.12861773558694142,3.0,10.0,0.0030871343675233465,16.0,74.0,0.021609940572663425,21.0,43.0,0.046114069614879995,25.0,3.0,0.04572817781893957,0.0,0.0,0.0,122.0,165.0,0.24484834452419543,29.0,12.0,0.06266882766072393,"yarowsky, d"
+106,0.0,0.0,0.0,9.0,27.0,0.08833922261484099,1.0,11.0,0.007067137809187279,0.0,0.0,0.0,0.0,0.0,0.0,0.0,123.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,11.0,0.0,1.0,3.0,0.007067137809187279,0.0,0.0,0.0,0.0,0.0,0.0,4.0,1.0,0.028268551236749116,1.0,0.0,0.01177856301531213,0.0,26.0,0.0,2.0,21.0,0.02355712603062426,0.0,5.0,0.0,0.0,0.0,0.0,84.0,137.0,0.8103651354534747,2.0,24.0,0.02355712603062426,"matsoukas, s"
+107,0.0,0.0,0.0,1.0,25.0,0.003012048192771084,2.0,0.0,0.01506024096385542,0.0,0.0,0.0,0.0,0.0,0.0,12.0,131.0,0.03840361445783132,0.0,0.0,0.0,18.0,13.0,0.06325301204819277,0.0,44.0,0.0,38.0,57.0,0.16566265060240962,1.0,28.0,0.00251004016064257,0.0,0.0,0.0,58.0,3.0,0.303714859437751,3.0,10.0,0.015562248995983933,36.0,82.0,0.13177710843373494,2.0,3.0,0.01506024096385542,25.0,176.0,0.14683734939759036,0.0,0.0,0.0,8.0,8.0,0.05396586345381525,8.0,25.0,0.04518072289156626,"magnini, b"
+108,0.0,0.0,0.0,0.0,158.0,0.0,3.0,32.0,0.03896103896103896,0.0,0.0,0.0,0.0,0.0,0.0,2.0,44.0,0.008658008658008658,0.0,0.0,0.0,0.0,12.0,0.0,0.0,60.0,0.0,0.0,55.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,26.0,0.0,1.0,19.0,0.004329004329004329,0.0,27.0,0.0,3.0,289.0,0.012987012987012988,0.0,5.0,0.0,0.0,0.0,0.0,101.0,944.0,0.7792207792207793,12.0,31.0,0.15584415584415584,"niehues, j"
+109,0.0,0.0,0.0,3.0,25.0,0.012994772218073188,56.0,10.0,0.20657206870799102,0.0,0.0,0.0,0.0,0.0,0.0,9.0,26.0,0.043315907393577296,0.0,0.0,0.0,17.0,120.0,0.06945481702763255,0.0,7.0,0.0,38.0,43.0,0.15011202389843165,0.0,28.0,0.0,0.0,0.0,0.0,38.0,8.0,0.1822255414488424,34.0,36.0,0.14787154592979834,8.0,8.0,0.058252427184466014,25.0,28.0,0.04899178491411501,8.0,22.0,0.032860343539955185,0.0,0.0,0.0,12.0,20.0,0.02091112770724421,10.0,38.0,0.02643764002987304,"sekine, s"
+110,0.0,0.0,0.0,23.0,11.0,0.014062884891268607,115.0,0.0,0.09003950250579341,0.0,0.0,0.0,0.0,0.0,0.0,53.0,0.0,0.03998871254531743,0.0,0.0,0.0,44.0,3.0,0.02832190185723091,16.0,0.0,0.012529504969097544,340.0,157.0,0.2623997064114392,8.0,3.0,0.0066073561360475335,0.0,0.0,0.0,73.0,3.0,0.07328222186445947,22.0,7.0,0.015801719436473467,54.0,6.0,0.03155216485707636,180.0,16.0,0.1365470069779274,42.0,6.0,0.03372198798323518,0.0,0.0,0.0,211.0,13.0,0.21956349318754778,42.0,0.0,0.035581836377085604,"isahara, h"
+111,0.0,0.0,0.0,1.0,122.0,0.0043388429752066115,0.0,27.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,54.0,352.0,0.22567148760330577,0.0,0.0,0.0,0.0,51.0,0.0,0.0,39.0,0.0,12.0,33.0,0.06249999999999999,0.0,19.0,0.0,0.0,0.0,0.0,0.0,13.0,0.0,0.0,65.0,0.0,17.0,18.0,0.06128615702479339,98.0,259.0,0.434220041322314,1.0,11.0,0.0043388429752066115,0.0,0.0,0.0,43.0,483.0,0.20402892561983468,1.0,84.0,0.0036157024793388426,"nakamura, s"
+112,0.0,0.0,0.0,7.0,47.0,0.016226056672471106,0.0,7.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,41.0,52.0,0.1317872407788507,0.0,0.0,0.0,2.0,6.0,0.008310907076143738,0.0,25.0,0.0,16.0,33.0,0.04120824758587937,0.0,66.0,0.0,0.0,0.0,0.0,7.0,43.0,0.016621814152287476,26.0,41.0,0.05471347158461294,85.0,165.0,0.3012209118252335,4.0,19.0,0.011773785024536961,115.0,294.0,0.3973602976096248,0.0,0.0,0.0,2.0,6.0,0.016621814152287476,2.0,28.0,0.004155453538071869,"poesio, m"
+113,0.0,0.0,0.0,3.0,88.0,0.005901874914465582,17.0,25.0,0.042151361707951286,0.0,0.0,0.0,0.0,0.0,0.0,11.0,41.0,0.019039961680580267,0.0,0.0,0.0,0.0,12.0,0.0,2.0,68.0,0.007184891200218969,5.0,88.0,0.014369782400437937,0.0,7.0,0.0,0.0,0.0,0.0,0.0,101.0,0.0,10.0,63.0,0.012573559600383196,17.0,75.0,0.02814937730942932,18.0,209.0,0.025027371014096075,0.0,52.0,0.0,0.0,0.0,0.0,342.0,1132.0,0.8228496647050774,10.0,7.0,0.022752155467360067,"federico, m"
+114,0.0,0.0,0.0,0.0,2.0,0.0,180.0,42.0,0.5611374407582939,0.0,0.0,0.0,0.0,0.0,0.0,16.0,5.0,0.043601895734597156,0.0,0.0,0.0,0.0,0.0,0.0,1.0,9.0,0.005687203791469194,31.0,24.0,0.11374407582938385,0.0,1.0,0.0,0.0,0.0,0.0,4.0,8.0,0.007582938388625592,3.0,58.0,0.005687203791469194,25.0,208.0,0.06729857819905212,2.0,42.0,0.005687203791469194,0.0,7.0,0.0,0.0,0.0,0.0,41.0,89.0,0.18388625592417057,3.0,10.0,0.005687203791469194,"oflazer, k"
+115,0.0,0.0,0.0,0.0,22.0,0.0,45.0,9.0,0.3214011420175644,0.0,0.0,0.0,0.0,0.0,0.0,9.0,22.0,0.05509733863158247,0.0,0.0,0.0,34.0,88.0,0.24242828997896282,0.0,55.0,0.0,3.0,4.0,0.020202357498246906,0.0,10.0,0.0,0.0,0.0,0.0,0.0,24.0,0.0,6.0,44.0,0.021103950312218255,11.0,124.0,0.02871740074130965,19.0,23.0,0.13957992453334225,16.0,86.0,0.06832069990316227,0.0,0.0,0.0,14.0,10.0,0.06641733729588942,5.0,6.0,0.036731559087721646,"mitamura, t"
+116,0.0,0.0,0.0,0.0,242.0,0.0,49.0,59.0,0.13505074160811867,0.0,0.0,0.0,0.0,0.0,0.0,5.0,392.0,0.013466042154566742,0.0,0.0,0.0,9.0,168.0,0.02517564402810304,0.0,342.0,0.0,49.0,7.0,0.19067135050741604,0.0,117.0,0.0,0.0,0.0,0.0,14.0,114.0,0.04391100702576112,37.0,146.0,0.101288056206089,7.0,45.0,0.022638563622170177,51.0,93.0,0.16139734582357534,11.0,75.0,0.029078844652615142,0.0,0.0,0.0,82.0,472.0,0.2521467603434816,10.0,128.0,0.02517564402810304,"zhao, t"
+117,0.0,0.0,0.0,0.0,3.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,16.0,2.0,0.15245009074410162,0.0,0.0,0.0,5.0,0.0,0.04718693284936478,0.0,2.0,0.0,12.0,5.0,0.08711433756805807,4.0,1.0,0.043557168784029036,0.0,0.0,0.0,0.0,0.0,0.0,1.0,48.0,0.0054446460980036296,8.0,78.0,0.06533575317604355,6.0,1.0,0.06533575317604355,0.0,1.0,0.0,0.0,0.0,0.0,66.0,154.0,0.5335753176043556,0.0,6.0,0.0,"gaspari, f"
+118,0.0,0.0,0.0,1.0,178.0,0.011750881316098707,5.0,17.0,0.04700352526439483,0.0,0.0,0.0,0.0,0.0,0.0,9.0,61.0,0.08930669800235018,0.0,0.0,0.0,0.0,5.0,0.0,0.0,103.0,0.0,1.0,34.0,0.0070505287896592255,0.0,12.0,0.0,0.0,0.0,0.0,0.0,56.0,0.0,0.0,27.0,0.0,7.0,130.0,0.08225616921269097,34.0,374.0,0.3061104582843714,2.0,12.0,0.017626321974148065,0.0,0.0,0.0,53.0,572.0,0.43889541715628677,0.0,16.0,0.0,"besacier, l"
+119,0.0,0.0,0.0,11.0,31.0,0.028735321759984448,30.0,40.0,0.26445138221218767,0.0,0.0,0.0,0.0,0.0,0.0,14.0,47.0,0.0666961941902797,0.0,0.0,0.0,12.0,23.0,0.13067009474013982,0.0,1.0,0.0,2.0,51.0,0.00604954142315462,4.0,15.0,0.05444587280839158,0.0,0.0,0.0,2.0,16.0,0.019056055482937054,2.0,7.0,0.007777981829770226,11.0,49.0,0.058766973824930596,12.0,27.0,0.13572578292949045,18.0,56.0,0.1306700947401398,0.0,0.0,0.0,30.0,116.0,0.09695470405859415,0.0,13.0,0.0,"uszkoreit, h"
+120,0.0,0.0,0.0,10.0,72.0,0.019953952417498082,55.0,33.0,0.49936045024302883,0.0,0.0,0.0,0.0,0.0,0.0,23.0,68.0,0.11537477615758505,0.0,0.0,0.0,3.0,144.0,0.00920951650038373,0.0,67.0,0.0,4.0,65.0,0.017651573292402144,0.0,30.0,0.0,0.0,0.0,0.0,2.0,107.0,0.0061396776669224865,3.0,102.0,0.02302379125095932,19.0,95.0,0.10590943975441289,0.0,11.0,0.0,26.0,446.0,0.13430544896392937,0.0,0.0,0.0,9.0,27.0,0.06907137375287797,0.0,14.0,0.0,"frank, a"
+121,0.0,0.0,0.0,6.0,27.0,0.04538087520259319,0.0,38.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,12.0,12.0,0.2366288492706645,0.0,0.0,0.0,9.0,10.0,0.036177355869414216,7.0,10.0,0.05672609400324148,2.0,7.0,0.04862236628849271,1.0,308.0,0.008103727714748784,0.0,0.0,0.0,3.0,9.0,0.011576753878212548,5.0,11.0,0.03727714748784441,48.0,131.0,0.33167399861078956,12.0,9.0,0.07293354943273907,6.0,10.0,0.0358879370224589,0.0,0.0,0.0,1.0,17.0,0.008103727714748784,11.0,14.0,0.07090761750405186,"bontcheva, k"
+122,0.0,0.0,0.0,15.0,10.0,0.06738351254480286,2.0,4.0,0.008602150537634409,0.0,0.0,0.0,0.0,0.0,0.0,5.0,25.0,0.043010752688172046,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,24.0,2.0,0.06881720430107527,0.0,0.0,0.0,0.0,0.0,0.0,28.0,15.0,0.09462365591397849,5.0,1.0,0.014336917562724013,140.0,260.0,0.5942652329749104,1.0,0.0,0.002867383512544803,22.0,20.0,0.06738351254480286,0.0,0.0,0.0,13.0,4.0,0.03870967741935484,0.0,1.0,0.0,"ide, n"
+123,0.0,0.0,0.0,12.0,30.0,0.11532125205930807,2.0,1.0,0.013179571663920923,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.04393190554640307,0.0,0.0,0.0,9.0,6.0,0.06754530477759473,10.0,26.0,0.09884678747940691,0.0,0.0,0.0,0.0,0.0,0.0,73.0,160.0,0.6611751784733663,0.0,0.0,0.0,"anderson, t"
+124,0.0,0.0,0.0,4.0,1.0,0.018368685764268537,29.0,2.0,0.09260879072818719,0.0,0.0,0.0,0.0,0.0,0.0,90.0,95.0,0.3101355783949267,0.0,0.0,0.0,1.0,5.0,0.004592171441067134,0.0,1.0,0.0,14.0,17.0,0.04592171441067134,0.0,0.0,0.0,0.0,0.0,0.0,15.0,4.0,0.04592171441067134,99.0,4.0,0.24185436256286905,22.0,9.0,0.06800787229389897,7.0,55.0,0.019134047671113055,26.0,3.0,0.10715066695823312,0.0,0.0,0.0,14.0,2.0,0.037885414388803856,3.0,4.0,0.008418980975289744,"dale, r"
+125,0.0,0.0,0.0,4.0,18.0,0.012195121951219513,37.0,61.0,0.16920731707317074,0.0,0.0,0.0,0.0,0.0,0.0,44.0,16.0,0.18292682926829268,0.0,0.0,0.0,35.0,30.0,0.14634146341463417,1.0,2.0,0.003048780487804878,27.0,63.0,0.12042682926829268,0.0,68.0,0.0,0.0,0.0,0.0,33.0,74.0,0.14481707317073172,6.0,8.0,0.022103658536585365,9.0,22.0,0.041158536585365856,0.0,0.0,0.0,27.0,30.0,0.07088414634146342,0.0,0.0,0.0,16.0,15.0,0.07317073170731708,3.0,16.0,0.013719512195121951,"manandhar, s"
+126,0.0,0.0,0.0,34.0,346.0,0.039391858380069436,116.0,276.0,0.20077448399526915,0.0,0.0,0.0,0.0,0.0,0.0,49.0,375.0,0.05360344893365382,0.0,0.0,0.0,40.0,277.0,0.037684559917591856,10.0,295.0,0.012685513715615581,98.0,299.0,0.1503567204608752,10.0,218.0,0.013353172332226928,0.0,0.0,0.0,56.0,141.0,0.08045286330166726,7.0,188.0,0.014688489565449622,10.0,138.0,0.013591621838159552,92.0,354.0,0.13090877875701043,60.0,118.0,0.10482240280798139,0.0,0.0,0.0,84.0,514.0,0.10657739117164551,34.0,120.0,0.041108694822784335,"lee, j"
+127,0.0,0.0,0.0,1.0,12.0,0.0035014005602240893,5.0,2.0,0.014005602240896357,0.0,0.0,0.0,0.0,0.0,0.0,22.0,62.0,0.0742296918767507,0.0,0.0,0.0,2.0,2.0,0.014005602240896357,0.0,46.0,0.0,76.0,22.0,0.5217086834733893,0.0,137.0,0.0,0.0,0.0,0.0,10.0,10.0,0.049019607843137254,2.0,5.0,0.009803921568627449,18.0,100.0,0.10644257703081231,1.0,9.0,0.0028011204481792717,46.0,120.0,0.15686274509803919,0.0,0.0,0.0,7.0,3.0,0.03711484593837534,2.0,17.0,0.010504201680672268,"tonelli, s"
+128,0.0,0.0,0.0,0.0,11.0,0.0,2.0,18.0,0.002529480492887342,0.0,0.0,0.0,0.0,0.0,0.0,33.0,47.0,0.06686059350445471,0.0,0.0,0.0,4.0,9.0,0.01131239887096839,0.0,82.0,0.0,23.0,32.0,0.06094843473338071,6.0,133.0,0.013279772587658546,0.0,0.0,0.0,9.0,51.0,0.019392683778802954,22.0,40.0,0.05058960985774684,4.0,10.0,0.0031618506161091775,100.0,49.0,0.3912985871044676,5.0,14.0,0.01264740246443671,0.0,0.0,0.0,233.0,120.0,0.3384685802387348,12.0,48.0,0.029510605750352324,"paul, m"
+129,0.0,0.0,0.0,0.0,25.0,0.0,23.0,7.0,0.08062418725617684,0.0,0.0,0.0,0.0,0.0,0.0,113.0,199.0,0.42956220199393147,0.0,0.0,0.0,22.0,5.0,0.09211096662332031,0.0,8.0,0.0,0.0,4.0,0.0,0.0,8.0,0.0,0.0,0.0,0.0,27.0,31.0,0.1332899869960988,0.0,31.0,0.0,0.0,6.0,0.0,56.0,109.0,0.23190290420459467,1.0,33.0,0.003250975292587776,0.0,0.0,0.0,4.0,61.0,0.013003901170351105,3.0,20.0,0.01625487646293888,"kawahara, t"
+130,0.0,0.0,0.0,1.0,104.0,0.003994407829039344,16.0,28.0,0.04733373277411624,0.0,0.0,0.0,0.0,0.0,0.0,0.0,84.0,0.0,0.0,0.0,0.0,0.0,84.0,0.0,0.0,58.0,0.0,8.0,5.0,0.03095666067505492,0.0,35.0,0.0,0.0,0.0,0.0,2.0,15.0,0.005991611743559017,0.0,63.0,0.0,1.0,44.0,0.005991611743559017,15.0,14.0,0.04933093668863591,0.0,39.0,0.0,0.0,0.0,0.0,238.0,217.0,0.8484122228879568,2.0,38.0,0.007988815658078689,"ma, y"
+131,0.0,0.0,0.0,0.0,28.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,32.0,0.009270183718186416,0.0,0.0,0.0,15.0,176.0,0.060677566155402,9.0,44.0,0.03370975897522333,92.0,21.0,0.3317040283161976,0.0,6.0,0.0,0.0,0.0,0.0,46.0,70.0,0.18793190628687012,3.0,13.0,0.011798415641328166,0.0,22.0,0.0,0.0,0.0,0.0,12.0,24.0,0.06404854205292435,0.0,0.0,0.0,18.0,6.0,0.08680262936120009,56.0,7.0,0.21405696949266814,"gliozzo, a"
+132,0.0,0.0,0.0,5.0,34.0,0.01283082225069589,61.0,31.0,0.060531082932001955,0.0,0.0,0.0,0.0,0.0,0.0,12.0,91.0,0.01399726063712279,0.0,0.0,0.0,11.0,53.0,0.018079794989616933,20.0,208.0,0.05734988733265587,165.0,110.0,0.318826492290019,3.0,23.0,0.005832191932134495,0.0,0.0,0.0,134.0,305.0,0.28013961914019353,3.0,44.0,0.005248972738921045,12.0,53.0,0.015552511819025318,3.0,17.0,0.007776255909512659,74.0,131.0,0.14658242389431364,0.0,0.0,0.0,7.0,214.0,0.008748287898201743,24.0,21.0,0.04850439623558521,"agirre, e"
+133,0.0,0.0,0.0,1.0,1.0,0.002291606989401318,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,13.0,3.0,0.03462872783984214,0.0,0.0,0.0,39.0,21.0,0.19898787357968112,3.0,16.0,0.008593526210254943,85.0,89.0,0.3461281390241574,3.0,67.0,0.013367707438174352,0.0,0.0,0.0,65.0,55.0,0.24602947261211366,6.0,8.0,0.02234316814666285,21.0,89.0,0.07272669403863904,0.0,1.0,0.0,5.0,120.0,0.024348324262389004,0.0,0.0,0.0,0.0,22.0,0.0,8.0,6.0,0.030554759858684236,"rigau, g"
+134,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,28.0,0.007211538461538462,0.0,0.0,0.0,16.0,4.0,0.06490384615384616,0.0,0.0,0.0,61.0,49.0,0.34679487179487184,6.0,35.0,0.02043269230769231,0.0,0.0,0.0,18.0,7.0,0.06370192307692309,22.0,10.0,0.08052884615384616,6.0,41.0,0.025240384615384616,1.0,2.0,0.001201923076923077,96.0,22.0,0.3108173076923077,0.0,0.0,0.0,9.0,0.0,0.03325320512820513,14.0,8.0,0.04591346153846154,"girju, r"
+135,0.0,0.0,0.0,2.0,155.0,0.0041685965724872626,24.0,45.0,0.06252894858730894,0.0,0.0,0.0,0.0,0.0,0.0,10.0,72.0,0.020909151061999604,0.0,0.0,0.0,0.0,184.0,0.0,3.0,97.0,0.01000463177396943,56.0,100.0,0.15210745715609078,11.0,1006.0,0.04585456229735989,0.0,0.0,0.0,41.0,63.0,0.14113676966849734,1.0,66.0,0.0041685965724872626,28.0,68.0,0.06353470522067094,4.0,52.0,0.007642427049559981,37.0,133.0,0.1040164097134917,0.0,0.0,0.0,77.0,249.0,0.34668828161185733,15.0,40.0,0.037239462714219546,"nakov, p"
+136,0.0,0.0,0.0,7.0,23.0,0.05432595573440644,2.0,2.0,0.024144869215291753,0.0,0.0,0.0,0.0,0.0,0.0,4.0,16.0,0.04225352112676056,0.0,0.0,0.0,7.0,19.0,0.06639839034205232,0.0,0.0,0.0,11.0,26.0,0.08048289738430583,0.0,4.0,0.0,0.0,0.0,0.0,26.0,23.0,0.2535211267605634,21.0,2.0,0.2052313883299799,9.0,20.0,0.04627766599597586,11.0,113.0,0.06237424547283703,29.0,17.0,0.15895372233400404,0.0,0.0,0.0,0.0,6.0,0.0,1.0,0.0,0.006036217303822938,"nastase, v"
+137,0.0,0.0,0.0,2.0,91.0,0.006307821698906643,0.0,17.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,66.0,0.025231286795626574,0.0,0.0,0.0,3.0,59.0,0.01892346509671993,3.0,116.0,0.009461732548359966,35.0,23.0,0.14823380992430613,2.0,33.0,0.008410428931875524,0.0,0.0,0.0,39.0,31.0,0.1324642556770395,11.0,13.0,0.037216148023549195,27.0,41.0,0.10281749369217828,48.0,49.0,0.1997476871320437,28.0,18.0,0.13666947014297728,0.0,0.0,0.0,37.0,8.0,0.14928511354079058,8.0,16.0,0.025231286795626574,"yu, s"
+138,0.0,0.0,0.0,3.0,91.0,0.028458498023715417,0.0,15.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,100.0,0.011857707509881424,0.0,0.0,0.0,0.0,46.0,0.0,0.0,145.0,0.0,59.0,283.0,0.40948616600790516,0.0,42.0,0.0,0.0,0.0,0.0,40.0,508.0,0.2940711462450593,4.0,28.0,0.030830039525691702,12.0,223.0,0.10434782608695653,0.0,8.0,0.0,18.0,254.0,0.11857707509881424,0.0,0.0,0.0,0.0,25.0,0.0,1.0,11.0,0.0023715415019762848,"navigli, r"
+139,0.0,0.0,0.0,3.0,12.0,0.013460459899046552,16.0,12.0,0.1346045989904655,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,139.0,0.0,11.0,44.0,0.06505888951205833,0.0,0.0,0.0,0.0,0.0,0.0,0.0,18.0,0.0,9.0,35.0,0.08076275939427932,25.0,17.0,0.12114413909141897,10.0,3.0,0.1346045989904655,1.0,11.0,0.006730229949523276,0.0,0.0,0.0,45.0,343.0,0.4436343241727425,0.0,2.0,0.0,"labaka, g"
+140,0.0,0.0,0.0,3.0,0.0,0.004779855083484514,72.0,4.0,0.17615214423574824,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,34.0,47.0,0.11264525146745169,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,12.0,22.0,0.03943380443874724,47.0,1.0,0.18641434825589606,10.0,1.0,0.04779855083484514,11.0,0.0,0.04445265227640598,0.0,0.0,0.0,66.0,92.0,0.38195025329610843,4.0,0.0,0.006373140111312686,"sarasola, k"
+141,0.0,0.0,0.0,2.0,167.0,0.015047291487532245,1.0,33.0,0.0051590713671539135,0.0,0.0,0.0,0.0,0.0,0.0,0.0,60.0,0.0,0.0,0.0,0.0,0.0,61.0,0.0,9.0,147.0,0.04643164230438522,0.0,15.0,0.0,0.0,43.0,0.0,0.0,0.0,0.0,0.0,22.0,0.0,0.0,48.0,0.0,0.0,29.0,0.0,25.0,16.0,0.12160668222577081,0.0,27.0,0.0,0.0,0.0,0.0,118.0,281.0,0.756725218032183,10.0,26.0,0.05503009458297507,"du, j"
+142,0.0,0.0,0.0,0.0,19.0,0.0,2.0,5.0,0.0332409972299169,0.0,0.0,0.0,0.0,0.0,0.0,2.0,19.0,0.02077562326869806,0.0,0.0,0.0,10.0,2.0,0.07756232686980609,0.0,216.0,0.0,28.0,106.0,0.25207756232686984,0.0,91.0,0.0,0.0,0.0,0.0,10.0,10.0,0.0664819944598338,1.0,81.0,0.008310249307479225,14.0,37.0,0.11218836565096954,0.0,22.0,0.0,55.0,59.0,0.41551246537396125,0.0,0.0,0.0,0.0,7.0,0.0,3.0,0.0,0.013850415512465374,"nissim, m"
+143,0.0,0.0,0.0,0.0,7.0,0.0,1.0,5.0,0.00831255195344971,0.0,0.0,0.0,0.0,0.0,0.0,53.0,113.0,0.2698808534220006,0.0,0.0,0.0,2.0,17.0,0.01662510390689942,0.0,4.0,0.0,13.0,10.0,0.1025214740925464,0.0,60.0,0.0,0.0,0.0,0.0,8.0,1.0,0.03325020781379884,10.0,24.0,0.031864782488223886,17.0,15.0,0.12468827930174564,0.0,0.0,0.0,64.0,43.0,0.3962316431144362,0.0,0.0,0.0,1.0,3.0,0.00831255195344971,3.0,28.0,0.00831255195344971,"di eugenio, b"
+144,0.0,0.0,0.0,0.0,5.0,0.0,0.0,12.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.0,0.0,0.0,3.0,1.0,0.0077269800386349004,0.0,50.0,0.0,202.0,139.0,0.3583386992916935,0.0,6.0,0.0,0.0,0.0,0.0,213.0,116.0,0.4352865421764327,11.0,13.0,0.03155183515775918,16.0,31.0,0.049581455247907275,2.0,8.0,0.0022537025112685126,59.0,37.0,0.10367031551835158,0.0,0.0,0.0,5.0,7.0,0.00643915003219575,5.0,4.0,0.0051513200257566,"mccarthy, d"
+145,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.010914051841746247,0.0,0.0,0.0,1.0,0.0,0.004092769440654843,3.0,0.0,0.01227830832196453,34.0,13.0,0.1819918144611187,0.0,22.0,0.0,0.0,0.0,0.0,77.0,39.0,0.407094133697135,82.0,56.0,0.3184174624829468,3.0,2.0,0.01637107776261937,1.0,1.0,0.005457025920873123,1.0,2.0,0.005457025920873123,0.0,0.0,0.0,8.0,2.0,0.03246930422919509,1.0,0.0,0.005457025920873123,"gonzalo, j"
+146,0.0,0.0,0.0,0.0,107.0,0.0,38.0,60.0,0.2640359785289424,0.0,0.0,0.0,0.0,0.0,0.0,24.0,212.0,0.08080661540693458,0.0,0.0,0.0,0.0,46.0,0.0,1.0,65.0,0.00812418395473669,0.0,10.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,6.0,37.0,0.07311765559263021,1.0,13.0,0.003046568983026259,2.0,10.0,0.012186275932105035,26.0,72.0,0.15581024227477153,5.0,84.0,0.03554330480197302,0.0,0.0,0.0,30.0,31.0,0.32293631220078345,14.0,144.0,0.044392862324096914,"yu, k"
+147,0.0,0.0,0.0,1.0,2.0,0.0041591570774989605,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,25.0,22.0,0.10189934839872454,0.0,0.0,0.0,15.0,9.0,0.049909884929987526,9.0,0.0,0.02772771384999307,90.0,86.0,0.2728407042839318,14.0,76.0,0.05822819908498545,0.0,0.0,0.0,45.0,31.0,0.13101344794121725,7.0,21.0,0.02772771384999307,0.0,38.0,0.0,10.0,132.0,0.04159157077498961,6.0,12.0,0.016636628309995842,0.0,0.0,0.0,24.0,4.0,0.09635380562872593,55.0,3.0,0.171911825869957,"strapparava, c"
+148,0.0,0.0,0.0,2.0,1.0,0.008264462809917356,1.0,30.0,0.004722550177095631,0.0,0.0,0.0,0.0,0.0,0.0,20.0,8.0,0.0885478158205431,0.0,0.0,0.0,3.0,12.0,0.018890200708382526,0.0,9.0,0.0,14.0,10.0,0.06375442739079103,11.0,0.0,0.07792207792207793,0.0,0.0,0.0,24.0,9.0,0.12750885478158205,48.0,30.0,0.3093270365997639,10.0,7.0,0.048406139315230225,7.0,1.0,0.029515938606847696,7.0,13.0,0.044864226682408505,0.0,0.0,0.0,42.0,7.0,0.1782762691853601,0.0,0.0,0.0,"lapalme, g"
+149,0.0,0.0,0.0,0.0,10.0,0.0,1.0,32.0,0.004606525911708254,0.0,0.0,0.0,0.0,0.0,0.0,23.0,36.0,0.06621880998080615,0.0,0.0,0.0,0.0,1.0,0.0,2.0,9.0,0.015355086372360844,7.0,19.0,0.03915547024952015,0.0,3.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,0.0,4.0,0.0,3.0,71.0,0.008637236084452975,9.0,3.0,0.0345489443378119,138.0,193.0,0.7531669865642995,0.0,0.0,0.0,1.0,0.0,0.004606525911708254,10.0,15.0,0.07370441458733207,"pradhan, s"
+150,0.0,0.0,0.0,10.0,46.0,0.011274070644131946,152.0,111.0,0.17446624321794188,0.0,0.0,0.0,0.0,0.0,0.0,27.0,119.0,0.04157313550023655,0.0,0.0,0.0,1.0,30.0,0.0009395058870109955,4.0,41.0,0.00422777649154948,174.0,526.0,0.20540618530411467,3.0,51.0,0.005637035322065973,0.0,0.0,0.0,88.0,25.0,0.14350952424092958,7.0,29.0,0.00634166473732422,76.0,298.0,0.0631347956071389,23.0,12.0,0.023534622469625437,243.0,549.0,0.21167738709991304,0.0,0.0,0.0,66.0,43.0,0.08009287686768736,30.0,28.0,0.028185176610329866,"palmer, m"
+151,0.0,0.0,0.0,0.0,50.0,0.0,14.0,4.0,0.1053368727912317,0.0,0.0,0.0,0.0,0.0,0.0,28.0,7.0,0.2795303161083335,0.0,0.0,0.0,0.0,29.0,0.0,0.0,83.0,0.0,2.0,0.0,0.004104034004853183,0.0,0.0,0.0,0.0,0.0,0.0,5.0,16.0,0.03573929612559647,0.0,1.0,0.0,2.0,2.0,0.005276615149096949,4.0,33.0,0.018810155855577087,0.0,27.0,0.0,0.0,0.0,0.0,116.0,78.0,0.5512027099653111,0.0,104.0,0.0,"bach, n"
+152,0.0,0.0,0.0,0.0,12.0,0.0,0.0,15.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,0.0,0.0,0.0,0.0,0.0,9.0,0.0,0.0,32.0,0.0,5.0,78.0,0.03879728419010669,0.0,432.0,0.0,0.0,0.0,0.0,50.0,46.0,0.39282250242483024,0.0,36.0,0.0,0.0,70.0,0.0,10.0,40.0,0.058195926285160036,1.0,52.0,0.005819592628516004,0.0,0.0,0.0,37.0,84.0,0.3588748787584869,21.0,18.0,0.1454898157129001,"hoste, v"
+153,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,1.0,3.0,0.009366219169528569,2.0,0.0,0.01248829222603809,57.0,45.0,0.42147986262878556,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,2.0,3.0,0.01248829222603809,49.0,91.0,0.32750546362784894,0.0,1.0,0.0,41.0,63.0,0.21667187012176087,0.0,0.0,0.0,0.0,16.0,0.0,0.0,0.0,0.0,"baker, c"
+154,0.0,0.0,0.0,4.0,28.0,0.013259668508287295,4.0,22.0,0.008839779005524863,0.0,0.0,0.0,0.0,0.0,0.0,0.0,30.0,0.0,0.0,0.0,0.0,2.0,19.0,0.006629834254143647,2.0,27.0,0.004419889502762431,28.0,91.0,0.061325966850828736,0.0,30.0,0.0,0.0,0.0,0.0,96.0,25.0,0.40939226519337024,14.0,12.0,0.041988950276243095,28.0,8.0,0.06795580110497239,5.0,10.0,0.014364640883977901,104.0,212.0,0.36187845303867405,0.0,0.0,0.0,1.0,1.0,0.0016574585635359118,4.0,33.0,0.00828729281767956,"erk, k"
+155,0.0,0.0,0.0,4.0,2.0,0.01654667359410178,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,15.0,0.04079269650411934,0.0,0.0,0.0,10.0,1.0,0.049640020782305345,0.0,10.0,0.0,101.0,103.0,0.5778549691976544,4.0,9.0,0.041366683985254456,0.0,0.0,0.0,1.0,25.0,0.004136668398525445,3.0,8.0,0.003918949009129369,46.0,51.0,0.20162497835176524,0.0,15.0,0.0,16.0,7.0,0.0641183601771444,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,"fellbaum, c"
+156,0.0,0.0,0.0,11.0,161.0,0.023749326397192707,10.0,34.0,0.022003052397399123,0.0,0.0,0.0,0.0,0.0,0.0,27.0,167.0,0.08067785879046345,0.0,0.0,0.0,21.0,185.0,0.04889567199422027,0.0,158.0,0.0,4.0,13.0,0.0048895671994220275,4.0,252.0,0.013446309798410576,0.0,0.0,0.0,95.0,124.0,0.2579537743361752,137.0,310.0,0.32380867732505747,21.0,103.0,0.04914015035419138,8.0,25.0,0.009284356765569208,20.0,129.0,0.09271371651211768,0.0,0.0,0.0,24.0,7.0,0.030653825134838098,18.0,42.0,0.04278371299494275,"radev, d"
+157,0.0,0.0,0.0,0.0,56.0,0.0,0.0,14.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,27.0,44.0,0.19901719901719903,0.0,0.0,0.0,0.0,39.0,0.0,0.0,2.0,0.0,61.0,11.0,0.29975429975429974,0.0,113.0,0.0,0.0,0.0,0.0,30.0,37.0,0.14987714987714987,29.0,18.0,0.21375921375921378,1.0,16.0,0.007371007371007371,7.0,23.0,0.049140049140049144,8.0,4.0,0.0515970515970516,0.0,0.0,0.0,3.0,3.0,0.022113022113022116,1.0,4.0,0.007371007371007371,"banerjee, s"
+158,0.0,0.0,0.0,17.0,202.0,0.06040158894270011,63.0,63.0,0.2448713065244599,0.0,0.0,0.0,0.0,0.0,0.0,16.0,230.0,0.07835881808782717,0.0,0.0,0.0,0.0,270.0,0.0,0.0,155.0,0.0,5.0,22.0,0.01523643685041084,0.0,92.0,0.0,0.0,0.0,0.0,54.0,67.0,0.11644991021385426,0.0,43.0,0.0,24.0,173.0,0.14120912009577188,31.0,70.0,0.0521303803667628,56.0,194.0,0.23420580072917233,0.0,0.0,0.0,32.0,42.0,0.04843010284594873,4.0,61.0,0.008706535343091907,"lee, k"
+159,0.0,0.0,0.0,4.0,678.0,0.0022651578889724146,133.0,246.0,0.08868964349899684,0.0,0.0,0.0,0.0,0.0,0.0,13.0,574.0,0.011883366771378358,0.0,0.0,0.0,12.0,744.0,0.0083636598977443,14.0,509.0,0.009409117384962336,28.0,141.0,0.045094066282004686,23.0,492.0,0.023000064718796825,0.0,0.0,0.0,113.0,479.0,0.09234874470425998,41.0,289.0,0.024742493864160223,27.0,153.0,0.017842474448521176,319.0,575.0,0.2035455943923652,50.0,367.0,0.04112132783057615,0.0,0.0,0.0,449.0,593.0,0.34154100433615936,144.0,357.0,0.0901532839811021,"wang, h"
+160,0.0,0.0,0.0,0.0,42.0,0.0,7.0,4.0,0.0477391158177845,0.0,0.0,0.0,0.0,0.0,0.0,10.0,0.0,0.037821482602118005,0.0,0.0,0.0,19.0,1.0,0.10556396032946713,0.0,3.0,0.0,2.0,3.0,0.005042864346949068,0.0,1.0,0.0,0.0,0.0,0.0,1.0,34.0,0.004034291477559254,7.0,4.0,0.038493864515044544,3.0,0.0,0.02017145738779627,14.0,4.0,0.06253151790216843,13.0,4.0,0.04992435703479577,0.0,0.0,0.0,47.0,119.0,0.38157673558581273,35.0,12.0,0.2471003530005043,"ittycheriah, a"
+161,0.0,0.0,0.0,8.0,74.0,0.01517500940910802,43.0,37.0,0.07935265336846069,0.0,0.0,0.0,0.0,0.0,0.0,15.0,64.0,0.050583364697026734,0.0,0.0,0.0,3.0,99.0,0.0037937523522770054,0.0,55.0,0.0,43.0,36.0,0.06375611592021077,0.0,97.0,0.0,0.0,0.0,0.0,25.0,148.0,0.0853594279262326,8.0,88.0,0.013067369213398572,6.0,29.0,0.026556266465939036,84.0,118.0,0.13454271735039522,13.0,46.0,0.02455400828001506,0.0,0.0,0.0,224.0,125.0,0.4837636432066241,10.0,75.0,0.01949567181031239,"lin, s"
+162,0.0,0.0,0.0,0.0,0.0,0.0,107.0,93.0,0.6388841639734648,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,40.0,14.0,0.15218007597664002,0.0,39.0,0.0,0.0,0.0,0.0,2.0,15.0,0.006236888359698361,0.0,0.0,0.0,11.0,6.0,0.037421330158190165,22.0,1.0,0.1465668764529115,3.0,36.0,0.011226399047457052,0.0,0.0,0.0,2.0,3.0,0.007484266031638034,0.0,6.0,0.0,"gojenola, k"
+163,0.0,0.0,0.0,12.0,354.0,0.07474375215939191,4.0,113.0,0.016468962340204996,0.0,0.0,0.0,0.0,0.0,0.0,18.0,168.0,0.11274904986755728,0.0,0.0,0.0,13.0,230.0,0.04940688702061499,0.0,608.0,0.0,23.0,78.0,0.1330185419785788,4.0,69.0,0.020269492111021536,0.0,0.0,0.0,29.0,60.0,0.14442013129102843,0.0,65.0,0.0,6.0,32.0,0.024185189450650697,15.0,187.0,0.061153979039502475,63.0,112.0,0.36358401474144875,0.0,0.0,0.0,0.0,380.0,0.0,0.0,105.0,0.0,"huang, s"
+164,0.0,0.0,0.0,0.0,135.0,0.0,2.0,23.0,0.013592233009708736,0.0,0.0,0.0,0.0,0.0,0.0,2.0,143.0,0.005825242718446601,0.0,0.0,0.0,0.0,132.0,0.0,0.0,123.0,0.0,14.0,28.0,0.06796116504854367,0.0,224.0,0.0,0.0,0.0,0.0,0.0,33.0,0.0,3.0,61.0,0.027184466019417472,0.0,25.0,0.0,84.0,250.0,0.426504854368932,10.0,110.0,0.06252427184466018,0.0,0.0,0.0,19.0,32.0,0.17213592233009709,16.0,35.0,0.22427184466019415,"chen, s"
+165,0.0,0.0,0.0,0.0,755.0,0.0,72.0,265.0,0.06682175050886885,0.0,0.0,0.0,0.0,0.0,0.0,11.0,674.0,0.014480953765629542,0.0,0.0,0.0,22.0,436.0,0.019482407676650187,0.0,700.0,0.0,21.0,67.0,0.03646408839779005,4.0,328.0,0.005234079674323931,0.0,0.0,0.0,110.0,342.0,0.1808665309683047,4.0,347.0,0.004361733061936609,37.0,169.0,0.043559174178540265,314.0,477.0,0.27124164001163126,101.0,288.0,0.09287583599883686,0.0,0.0,0.0,37.0,672.0,0.04006978772899098,211.0,308.0,0.2245420180284966,"wang, x"
+166,0.0,0.0,0.0,2.0,0.0,0.013701311411235076,45.0,8.0,0.2762771579565473,0.0,0.0,0.0,0.0,0.0,0.0,4.0,7.0,0.003914660403210021,0.0,0.0,0.0,6.0,6.0,0.027402622822470152,3.0,4.0,0.020551967116852615,29.0,1.0,0.23063874208912377,3.0,1.0,0.010275983558426308,0.0,0.0,0.0,35.0,56.0,0.18610948000260977,18.0,7.0,0.08220786846741046,8.0,3.0,0.02675017942193515,0.0,4.0,0.0,23.0,12.0,0.07878254061460169,0.0,0.0,0.0,0.0,0.0,0.0,7.0,12.0,0.04338748613557774,"ciaramita, m"
+167,0.0,0.0,0.0,6.0,97.0,0.017877412031782065,35.0,49.0,0.1812570942111237,0.0,0.0,0.0,0.0,0.0,0.0,2.0,119.0,0.014897843359818388,0.0,0.0,0.0,4.0,50.0,0.019863791146424517,1.0,166.0,0.004965947786606129,46.0,59.0,0.15749148694665152,0.0,140.0,0.0,0.0,0.0,0.0,21.0,62.0,0.15394438138479002,8.0,37.0,0.05959137343927355,14.0,39.0,0.05959137343927355,6.0,58.0,0.04469353007945516,22.0,59.0,0.09311152099886492,0.0,0.0,0.0,36.0,15.0,0.1182250283768445,16.0,19.0,0.07448921679909194,"park, s"
+168,0.0,0.0,0.0,12.0,258.0,0.04342319522344853,22.0,14.0,0.08865569024787408,0.0,0.0,0.0,0.0,0.0,0.0,10.0,504.0,0.04523249502442554,0.0,0.0,0.0,3.0,266.0,0.016283698208793197,0.0,406.0,0.0,18.0,88.0,0.05608829383028768,42.0,270.0,0.15198118328206983,0.0,0.0,0.0,36.0,117.0,0.1166998371630179,7.0,104.0,0.032567396417586394,21.0,119.0,0.08141849104396598,43.0,159.0,0.17586394065496652,5.0,70.0,0.01809299800977022,0.0,0.0,0.0,33.0,123.0,0.12122308666546044,13.0,160.0,0.05246969422833365,"lee, s"
+169,0.0,0.0,0.0,4.0,426.0,0.008649895480429612,43.0,33.0,0.053989764290348144,0.0,0.0,0.0,0.0,0.0,0.0,49.0,493.0,0.07107330786419665,0.0,0.0,0.0,5.0,206.0,0.006054926836300728,0.0,258.0,0.0,22.0,46.0,0.04757442514236286,39.0,240.0,0.06343256685648381,0.0,0.0,0.0,93.0,141.0,0.13465003964535427,6.0,292.0,0.00735241115836517,43.0,26.0,0.05161104303323002,124.0,309.0,0.20875081092770126,50.0,71.0,0.07064081309017516,0.0,0.0,0.0,88.0,70.0,0.15101275859583363,80.0,113.0,0.12520723707921863,"wu, c"
+170,0.0,0.0,0.0,0.0,0.0,0.0,70.0,0.0,0.3776861417015885,0.0,0.0,0.0,0.0,0.0,0.0,14.0,0.0,0.06566124159438651,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.007035133027969984,4.0,0.0,0.018425348406588053,0.0,1.0,0.0,0.0,0.0,0.0,29.0,2.0,0.13601257187408636,0.0,4.0,0.0,4.0,5.0,0.025795487769223274,0.0,3.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,68.0,78.0,0.3271732774583374,6.0,40.0,0.042210798167819905,"ambati, v"
+171,0.0,0.0,0.0,2.0,3.0,0.0015007120261388087,230.0,27.0,0.36167636246461526,0.0,0.0,0.0,0.0,0.0,0.0,48.0,9.0,0.047655944118941286,0.0,0.0,0.0,0.0,1.0,0.0,3.0,11.0,0.0017150994584443529,40.0,18.0,0.036955629164313455,1.0,18.0,0.0010004746840925392,0.0,0.0,0.0,15.0,6.0,0.013006170893203009,101.0,149.0,0.1827533756275705,43.0,53.0,0.038665964457595466,35.0,5.0,0.033158589529924155,10.0,5.0,0.010771777432063005,0.0,0.0,0.0,192.0,368.0,0.26980593389764146,1.0,20.0,0.001333966245456719,"lavie, a"
+172,0.0,0.0,0.0,1.0,49.0,0.0014314166504576424,68.0,26.0,0.16461291480262888,0.0,0.0,0.0,0.0,0.0,0.0,6.0,9.0,0.011187651189103153,0.0,0.0,0.0,23.0,7.0,0.09876774888157734,15.0,77.0,0.03946620193404643,16.0,32.0,0.030775457984839313,0.0,10.0,0.0,0.0,0.0,0.0,33.0,18.0,0.12238612361412844,33.0,51.0,0.08079970303241166,22.0,50.0,0.04484423877790872,15.0,17.0,0.027789931828170515,0.0,73.0,0.0,0.0,0.0,0.0,137.0,186.0,0.3120934453836764,21.0,70.0,0.06584516592105155,"carbonell, j"
+173,0.0,0.0,0.0,2.0,2.0,0.0025456088247772595,65.0,53.0,0.24327535002121342,0.0,0.0,0.0,0.0,0.0,0.0,12.0,24.0,0.04582095884599067,0.0,0.0,0.0,1.0,0.0,0.0025456088247772595,0.0,0.0,0.0,9.0,7.0,0.025965210012728043,0.0,0.0,0.0,0.0,0.0,0.0,12.0,0.0,0.023419601187950785,0.0,2.0,0.0,1.0,11.0,0.005091217649554519,27.0,31.0,0.08061094611794654,10.0,0.0,0.023674162070428512,0.0,0.0,0.0,174.0,22.0,0.5043699618158677,23.0,7.0,0.04268137462876538,"moore, r"
+174,0.0,0.0,0.0,0.0,1.0,0.0,27.0,12.0,0.10150309460654287,0.0,0.0,0.0,0.0,0.0,0.0,20.0,43.0,0.074447391688771,0.0,0.0,0.0,26.0,5.0,0.07780725022104332,1.0,0.0,0.0021220159151193636,30.0,14.0,0.06348364279398762,0.0,71.0,0.0,0.0,0.0,0.0,100.0,4.0,0.28328912466843503,0.0,17.0,0.0,11.0,18.0,0.03023872679045093,12.0,2.0,0.03713527851458886,19.0,12.0,0.0707338638373121,0.0,0.0,0.0,6.0,6.0,0.01927497789566755,54.0,2.0,0.23996463306808136,"srihari, r"
+175,0.0,0.0,0.0,0.0,60.0,0.0,65.0,196.0,0.5267558528428093,0.0,0.0,0.0,0.0,0.0,0.0,0.0,22.0,0.0,0.0,0.0,0.0,0.0,21.0,0.0,1.0,44.0,0.005016722408026756,0.0,15.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,30.0,0.0,2.0,9.0,0.010033444816053512,5.0,35.0,0.025083612040133776,15.0,4.0,0.1003344481605351,0.0,12.0,0.0,0.0,0.0,0.0,8.0,43.0,0.05685618729096989,38.0,36.0,0.2759197324414715,"petrov, s"
+176,0.0,0.0,0.0,0.0,80.0,0.0,122.0,359.0,0.5919689119170986,0.0,0.0,0.0,0.0,0.0,0.0,0.0,58.0,0.0,0.0,0.0,0.0,0.0,17.0,0.0,5.0,37.0,0.019430051813471506,9.0,73.0,0.034974093264248704,0.0,36.0,0.0,0.0,0.0,0.0,0.0,45.0,0.0,0.0,10.0,0.0,0.0,174.0,0.0,2.0,43.0,0.007772020725388602,1.0,167.0,0.007772020725388602,0.0,0.0,0.0,55.0,25.0,0.29533678756476683,8.0,65.0,0.04274611398963731,"kuhn, j"
+177,0.0,0.0,0.0,0.0,526.0,0.0,31.0,219.0,0.27823240589198034,0.0,0.0,0.0,0.0,0.0,0.0,1.0,62.0,0.004909983633387889,0.0,0.0,0.0,0.0,166.0,0.0,4.0,621.0,0.03927986906710311,8.0,140.0,0.07855973813420622,0.0,258.0,0.0,0.0,0.0,0.0,8.0,229.0,0.062193126022913256,0.0,79.0,0.0,5.0,80.0,0.04909983633387889,9.0,42.0,0.029459901800327332,16.0,127.0,0.15711947626841244,0.0,0.0,0.0,9.0,216.0,0.06546644844517184,27.0,83.0,0.23567921440261866,"sch{\""u}tze, h"
+178,0.0,0.0,0.0,9.0,29.0,0.09403372243839168,1.0,57.0,0.019455252918287935,0.0,0.0,0.0,0.0,0.0,0.0,2.0,19.0,0.003242542153047989,0.0,0.0,0.0,0.0,4.0,0.0,0.0,30.0,0.0,9.0,68.0,0.10700389105058365,6.0,1.0,0.03891050583657587,0.0,0.0,0.0,4.0,17.0,0.03891050583657587,24.0,4.0,0.21725032425421528,23.0,31.0,0.17671854734111542,4.0,3.0,0.030479896238651098,17.0,39.0,0.23670557717250323,0.0,0.0,0.0,1.0,6.0,0.009727626459143967,8.0,8.0,0.027561608300907908,"poibeau, t"
+179,0.0,0.0,0.0,0.0,62.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,66.0,0.0,0.0,0.0,0.0,1.0,58.0,0.0072289156626506035,0.0,59.0,0.0,8.0,9.0,0.03795180722891567,1.0,55.0,0.004819277108433735,0.0,0.0,0.0,3.0,23.0,0.02168674698795181,1.0,53.0,0.004819277108433735,11.0,26.0,0.01987951807228916,2.0,41.0,0.014457831325301207,188.0,33.0,0.8289156626506025,0.0,0.0,0.0,0.0,33.0,0.0,17.0,36.0,0.060240963855421686,"yang, x"
+180,0.0,0.0,0.0,1.0,2.0,0.01,19.0,5.0,0.09333333333333334,0.0,0.0,0.0,0.0,0.0,0.0,35.0,57.0,0.14,0.0,0.0,0.0,4.0,32.0,0.02,1.0,0.0,0.005,19.0,19.0,0.09166666666666666,11.0,7.0,0.055,0.0,0.0,0.0,10.0,35.0,0.05,10.0,6.0,0.04833333333333333,10.0,2.0,0.04166666666666666,20.0,17.0,0.1,43.0,35.0,0.285,0.0,0.0,0.0,3.0,0.0,0.01,9.0,0.0,0.05,"pulman, s"
+181,0.0,0.0,0.0,6.0,62.0,0.010808619874349792,199.0,58.0,0.35776531784097815,0.0,0.0,0.0,0.0,0.0,0.0,10.0,61.0,0.01655069918259812,0.0,0.0,0.0,13.0,3.0,0.010741066000135108,0.0,46.0,0.0,63.0,98.0,0.10632979801391611,2.0,395.0,0.0021617239748699586,0.0,0.0,0.0,8.0,55.0,0.006890495169897994,17.0,102.0,0.03202053637776127,51.0,68.0,0.059447409308923865,10.0,12.0,0.016212929811524692,267.0,184.0,0.25298925893399987,0.0,0.0,0.0,54.0,78.0,0.09525096264270756,20.0,12.0,0.0328311828683375,"joshi, a"
+182,0.0,0.0,0.0,0.0,27.0,0.0,8.0,29.0,0.04276370954217676,0.0,0.0,0.0,0.0,0.0,0.0,8.0,65.0,0.024316619151433842,0.0,0.0,0.0,22.0,28.0,0.10045279221868189,0.0,23.0,0.0,3.0,25.0,0.015093073956062386,1.0,8.0,0.002012409860808318,0.0,0.0,0.0,10.0,26.0,0.045279221868187156,6.0,56.0,0.04561462351165521,29.0,97.0,0.08636592319302364,1.0,46.0,0.01006204930404159,194.0,415.0,0.6280395773939292,0.0,0.0,0.0,0.0,73.0,0.0,0.0,8.0,0.0,"webber, b"
+183,0.0,0.0,0.0,6.0,339.0,0.013146842931745973,325.0,153.0,0.47529489099076067,0.0,0.0,0.0,0.0,0.0,0.0,4.0,41.0,0.00613519336814812,0.0,0.0,0.0,13.0,35.0,0.020450644560493732,18.0,58.0,0.02008545447905635,103.0,82.0,0.15264945404082822,0.0,47.0,0.0,0.0,0.0,0.0,1.0,61.0,0.0010955702443121644,37.0,139.0,0.0551437022970456,61.0,132.0,0.09049410218018478,11.0,55.0,0.015484059452945256,16.0,50.0,0.02103494869079356,0.0,0.0,0.0,109.0,1294.0,0.12109703100463791,7.0,36.0,0.007888105759047583,"van genabith, j"
+184,0.0,0.0,0.0,8.0,857.0,0.004597102713655519,81.0,463.0,0.08517093145152778,0.0,0.0,0.0,0.0,0.0,0.0,40.0,813.0,0.03974868608978913,0.0,0.0,0.0,67.0,538.0,0.05363286499264771,10.0,789.0,0.010912314522313605,18.0,99.0,0.016113077188267322,0.0,440.0,0.0,0.0,0.0,0.0,98.0,372.0,0.07838292303687389,96.0,653.0,0.1314121280772234,6.0,219.0,0.007894014760822608,180.0,631.0,0.23656293753034124,78.0,373.0,0.10517613784272474,0.0,0.0,0.0,213.0,1050.0,0.1507602034713965,76.0,363.0,0.0796366783224163,"liu, y"
+185,0.0,0.0,0.0,16.0,953.0,0.034469659664870485,12.0,245.0,0.027575727731896392,0.0,0.0,0.0,0.0,0.0,0.0,41.0,859.0,0.06481702941879931,0.0,0.0,0.0,31.0,504.0,0.04579540498332794,0.0,907.0,0.0,18.0,120.0,0.035159052858167894,13.0,633.0,0.02462118547490749,0.0,0.0,0.0,92.0,379.0,0.15028771613883532,7.0,385.0,0.014772711284944494,12.0,212.0,0.022060582185517114,223.0,758.0,0.3757333595959312,25.0,253.0,0.04973479465931314,0.0,0.0,0.0,14.0,587.0,0.025957764114973896,45.0,275.0,0.12901501188851525,"wang, y"
+186,0.0,0.0,0.0,0.0,103.0,0.0,28.0,14.0,0.08262967926529471,0.0,0.0,0.0,0.0,0.0,0.0,5.0,25.0,0.022445674069080056,0.0,0.0,0.0,17.0,11.0,0.05266100454668783,0.0,15.0,0.0,12.0,7.0,0.06733702220724017,0.0,96.0,0.0,0.0,0.0,0.0,35.0,66.0,0.10189349404326342,20.0,27.0,0.06129395611171862,92.0,252.0,0.21432741085449777,11.0,8.0,0.04575464329466319,19.0,72.0,0.04518733504896117,0.0,0.0,0.0,13.0,45.0,0.03971157719914164,83.0,79.0,0.26675820335945144,"ananiadou, s"
+187,0.0,0.0,0.0,3.0,711.0,0.007983576642335767,11.0,127.0,0.03814375506893755,0.0,0.0,0.0,0.0,0.0,0.0,48.0,753.0,0.10662510137875102,0.0,0.0,0.0,2.0,753.0,0.004257907542579076,9.0,1542.0,0.03193430656934307,2.0,119.0,0.005322384428223845,0.0,339.0,0.0,0.0,0.0,0.0,10.0,241.0,0.02572485806974858,26.0,408.0,0.07398114355231145,2.0,251.0,0.00390308191403082,49.0,289.0,0.12984083536090837,1.0,298.0,0.003548256285482563,0.0,0.0,0.0,175.0,172.0,0.5261557177615572,12.0,180.0,0.04257907542579076,"liu, z"
+188,0.0,0.0,0.0,0.0,419.0,0.0,52.0,51.0,0.20929027801761355,0.0,0.0,0.0,0.0,0.0,0.0,4.0,109.0,0.013210153686755313,0.0,0.0,0.0,7.0,114.0,0.02486617164565706,0.0,183.0,0.0,1.0,40.0,0.0031082714557071323,0.0,24.0,0.0,0.0,0.0,0.0,0.0,91.0,0.0,0.0,39.0,0.0,3.0,41.0,0.011656017958901746,5.0,104.0,0.015541357278535662,5.0,215.0,0.017613538249007084,0.0,0.0,0.0,181.0,637.0,0.6736314971507513,6.0,109.0,0.031082714557071324,"xiong, d"
+189,0.0,0.0,0.0,3.0,588.0,0.0040202649197948435,172.0,694.0,0.16744403390945525,0.0,0.0,0.0,0.0,0.0,0.0,17.0,186.0,0.016550090586488774,0.0,0.0,0.0,12.0,214.0,0.015277006695220407,0.0,655.0,0.0,14.0,85.0,0.014405949295931524,0.0,263.0,0.0,0.0,0.0,0.0,68.0,228.0,0.07511194958483366,6.0,161.0,0.006030397379692266,10.0,214.0,0.010385684376136678,41.0,249.0,0.050253311497435546,161.0,487.0,0.1536794125888243,0.0,0.0,0.0,457.0,1072.0,0.43524849936215265,40.0,237.0,0.05159339980403383,"zhang, m"
+190,0.0,0.0,0.0,2.0,65.0,0.004835286286625724,78.0,5.0,0.15408445633380644,0.0,0.0,0.0,0.0,0.0,0.0,9.0,50.0,0.015634092326756506,0.0,0.0,0.0,4.0,39.0,0.009670572573251449,0.0,12.0,0.0,0.0,2.0,0.0,0.0,37.0,0.0,0.0,0.0,0.0,24.0,16.0,0.05318814915288297,0.0,34.0,0.0,4.0,3.0,0.010476453621022402,8.0,46.0,0.012894096764335264,50.0,0.0,0.0722990425714513,0.0,0.0,0.0,322.0,81.0,0.6669178503698682,0.0,18.0,0.0,"aw, a"
+191,0.0,0.0,0.0,2.0,425.0,0.0015664133846294162,105.0,141.0,0.0869359428469326,0.0,0.0,0.0,0.0,0.0,0.0,33.0,586.0,0.028508723600255373,0.0,0.0,0.0,15.0,335.0,0.013680010225763565,3.0,455.0,0.0037593921231105984,24.0,55.0,0.02062444289762064,2.0,294.0,0.0015664133846294162,0.0,0.0,0.0,183.0,242.0,0.16666638412456988,8.0,446.0,0.006787791333394136,19.0,149.0,0.013952267790234873,107.0,142.0,0.0947530915473689,52.0,290.0,0.05534660625690604,0.0,0.0,0.0,458.0,537.0,0.3822252088805468,129.0,135.0,0.12362731160403782,"li, h"
+192,0.0,0.0,0.0,0.0,74.0,0.0,65.0,325.0,0.1015824400204186,0.0,0.0,0.0,0.0,0.0,0.0,7.0,26.0,0.009698825931597753,0.0,0.0,0.0,1.0,50.0,0.0010209290454313426,23.0,15.0,0.02399183256763655,28.0,65.0,0.04134762633996937,0.0,10.0,0.0,0.0,0.0,0.0,84.0,54.0,0.11893823379275138,0.0,148.0,0.0,11.0,20.0,0.014037774374680961,59.0,131.0,0.05339458907605921,161.0,254.0,0.22383869321082184,0.0,0.0,0.0,229.0,382.0,0.34900459418070445,60.0,87.0,0.06314446145992854,"wu, d"
+193,0.0,0.0,0.0,2.0,301.0,0.017361111111111112,5.0,21.0,0.02604166666666667,0.0,0.0,0.0,0.0,0.0,0.0,13.0,129.0,0.08159722222222224,0.0,0.0,0.0,12.0,137.0,0.06250000000000001,0.0,306.0,0.0,6.0,23.0,0.05902777777777778,9.0,349.0,0.04687500000000001,0.0,0.0,0.0,20.0,103.0,0.1545138888888889,2.0,79.0,0.01388888888888889,10.0,36.0,0.05729166666666668,21.0,145.0,0.19270833333333337,22.0,69.0,0.20833333333333337,0.0,0.0,0.0,1.0,41.0,0.006944444444444445,11.0,129.0,0.07291666666666667,"zhang, q"
+194,0.0,0.0,0.0,0.0,176.0,0.0,2.0,23.0,0.006231500233681259,0.0,0.0,0.0,0.0,0.0,0.0,18.0,339.0,0.05452562704471102,0.0,0.0,0.0,25.0,166.0,0.07010437762891417,0.0,129.0,0.0,9.0,157.0,0.02648387599314535,28.0,134.0,0.08334631562548685,0.0,0.0,0.0,19.0,110.0,0.07555694033338527,12.0,167.0,0.037389001402087556,6.0,73.0,0.018694500701043778,130.0,352.0,0.42124941579685315,15.0,217.0,0.07010437762891417,0.0,0.0,0.0,35.0,253.0,0.09658825362205953,10.0,79.0,0.03972581398971803,"liu, c"
+195,0.0,0.0,0.0,0.0,94.0,0.0,1.0,29.0,0.02566295979469632,0.0,0.0,0.0,0.0,0.0,0.0,0.0,258.0,0.0,0.0,0.0,0.0,7.0,54.0,0.0701454234388366,0.0,152.0,0.0,23.0,205.0,0.1894781864841745,0.0,46.0,0.0,0.0,0.0,0.0,40.0,86.0,0.4059024807527801,2.0,480.0,0.008554319931565439,14.0,172.0,0.08768177929854576,0.0,92.0,0.0,6.0,121.0,0.07057313943541488,0.0,0.0,0.0,27.0,1186.0,0.13686911890504705,1.0,154.0,0.0051325919589392645,"specia, l"
+196,0.0,0.0,0.0,1.0,204.0,0.0028089887640449437,154.0,390.0,0.7556179775280899,0.0,0.0,0.0,0.0,0.0,0.0,4.0,60.0,0.016853932584269662,0.0,0.0,0.0,0.0,64.0,0.0,1.0,592.0,0.0028089887640449437,0.0,110.0,0.0,1.0,142.0,0.0056179775280898875,0.0,0.0,0.0,0.0,76.0,0.0,0.0,127.0,0.0,2.0,287.0,0.011235955056179775,4.0,173.0,0.011235955056179775,0.0,224.0,0.0,0.0,0.0,0.0,42.0,147.0,0.19382022471910113,0.0,170.0,0.0,"s{\o}gaard, a"
+197,0.0,0.0,0.0,0.0,5.0,0.0,25.0,31.0,0.1489841986455982,0.0,0.0,0.0,0.0,0.0,0.0,4.0,15.0,0.040632054176072234,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,29.0,44.0,0.2483069977426637,0.0,1.0,0.0,0.0,0.0,0.0,14.0,16.0,0.08126410835214447,1.0,29.0,0.004514672686230248,20.0,29.0,0.09706546275395034,0.0,43.0,0.0,3.0,9.0,0.022573363431151242,0.0,0.0,0.0,62.0,188.0,0.35665914221218964,0.0,0.0,0.0,"volk, m"
+198,0.0,0.0,0.0,0.0,179.0,0.0,0.0,14.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,116.0,0.0,0.0,0.0,0.0,0.0,97.0,0.0,0.0,100.0,0.0,1.0,39.0,0.009433962264150943,1.0,177.0,0.009433962264150943,0.0,0.0,0.0,36.0,81.0,0.33962264150943394,0.0,61.0,0.0,2.0,28.0,0.018867924528301886,56.0,142.0,0.5031446540880503,0.0,54.0,0.0,0.0,0.0,0.0,1.0,5.0,0.006289308176100628,12.0,60.0,0.11320754716981132,"sun, c"
+199,0.0,0.0,0.0,11.0,644.0,0.03898536782947699,91.0,103.0,0.3635765277707458,0.0,0.0,0.0,0.0,0.0,0.0,5.0,671.0,0.010632373044402815,0.0,0.0,0.0,17.0,736.0,0.04784567869981266,0.0,854.0,0.0,3.0,78.0,0.01240443521846995,0.0,536.0,0.0,0.0,0.0,0.0,2.0,166.0,0.0035441243481342715,0.0,352.0,0.0,6.0,131.0,0.010632373044402815,44.0,240.0,0.11341197914029669,3.0,275.0,0.005316186522201408,0.0,0.0,0.0,107.0,372.0,0.32985671611563966,18.0,527.0,0.06379423826641689,"wang, w"
+200,0.0,0.0,0.0,2.0,842.0,0.006031752870468106,27.0,197.0,0.113095366321277,0.0,0.0,0.0,0.0,0.0,0.0,13.0,908.0,0.07690484909846836,0.0,0.0,0.0,6.0,623.0,0.011761918097412807,6.0,789.0,0.01357144395855324,45.0,60.0,0.16180177075030694,10.0,888.0,0.03015876435234053,0.0,0.0,0.0,17.0,431.0,0.07162706533680877,15.0,480.0,0.04825402296374485,7.0,198.0,0.017341289502595805,94.0,303.0,0.19254216841515687,36.0,768.0,0.06604769393162577,0.0,0.0,0.0,57.0,342.0,0.10943323064992133,28.0,306.0,0.08142866375131944,"li, j"
+201,0.0,0.0,0.0,4.0,7.0,0.010021295252411373,2.0,1.0,0.003757985719654265,0.0,0.0,0.0,0.0,0.0,0.0,29.0,4.0,0.09207065013152949,0.0,0.0,0.0,117.0,42.0,0.3545033195540523,0.0,2.0,0.0,8.0,6.0,0.014697899703536682,3.0,18.0,0.006889640486032819,0.0,0.0,0.0,29.0,13.0,0.18602029312288612,7.0,3.0,0.03006388575723412,3.0,10.0,0.01503194287861706,2.0,0.0,0.00751597143930853,82.0,93.0,0.20719027934360515,0.0,0.0,0.0,5.0,0.0,0.019625036535972272,17.0,11.0,0.05261180007515971,"harabagiu, s"
+202,0.0,0.0,0.0,0.0,88.0,0.0,0.0,8.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,13.0,0.08664259927797835,0.0,0.0,0.0,19.0,3.0,0.20577617328519857,0.0,10.0,0.0,6.0,13.0,0.06498194945848376,14.0,59.0,0.1516245487364621,0.0,0.0,0.0,15.0,19.0,0.1263537906137184,12.0,85.0,0.12996389891696752,3.0,8.0,0.03249097472924188,8.0,1.0,0.08664259927797835,8.0,134.0,0.05776173285198556,0.0,0.0,0.0,4.0,9.0,0.02888086642599278,4.0,0.0,0.02888086642599278,"kosseim, l"
+203,0.0,0.0,0.0,0.0,125.0,0.0,4.0,205.0,0.049450549450549455,0.0,0.0,0.0,0.0,0.0,0.0,2.0,28.0,0.03296703296703297,0.0,0.0,0.0,2.0,4.0,0.016483516483516484,0.0,56.0,0.0,0.0,19.0,0.0,0.0,28.0,0.0,0.0,0.0,0.0,9.0,13.0,0.07417582417582418,0.0,32.0,0.0,3.0,44.0,0.04120879120879121,4.0,90.0,0.020604395604395604,0.0,87.0,0.0,0.0,0.0,0.0,69.0,136.0,0.5865384615384615,27.0,47.0,0.17857142857142855,"lopez, a"
+204,0.0,0.0,0.0,0.0,40.0,0.0,14.0,24.0,0.040525739320920046,0.0,0.0,0.0,0.0,0.0,0.0,7.0,28.0,0.02190580503833516,0.0,0.0,0.0,3.0,10.0,0.009857612267250822,0.0,5.0,0.0,77.0,16.0,0.1538882803943045,6.0,149.0,0.019715224534501644,0.0,0.0,0.0,51.0,122.0,0.11883899233296824,10.0,6.0,0.014238773274917856,17.0,15.0,0.04326396495071194,28.0,25.0,0.0755750273822563,21.0,71.0,0.04874041621029573,0.0,0.0,0.0,185.0,154.0,0.4271631982475356,12.0,41.0,0.026286966046002194,"resnik, p"
+205,0.0,0.0,0.0,2.0,13.0,0.002946561993559657,42.0,4.0,0.045854117118823624,0.0,0.0,0.0,0.0,0.0,0.0,37.0,19.0,0.05023888199019216,0.0,0.0,0.0,9.0,6.0,0.0154694504661882,1.0,2.0,0.0014732809967798286,16.0,2.0,0.016206090964578115,0.0,0.0,0.0,0.0,0.0,0.0,22.0,0.0,0.029465619935596574,62.0,11.0,0.09200990606079738,34.0,10.0,0.05844014620559987,6.0,3.0,0.009576326479068887,151.0,18.0,0.2880264348704565,0.0,0.0,0.0,288.0,53.0,0.33136194304716604,45.0,12.0,0.05893123987119315,"marcu, d"
+206,0.0,0.0,0.0,0.0,113.0,0.0,20.0,83.0,0.1603269412134549,0.0,0.0,0.0,0.0,0.0,0.0,8.0,112.0,0.030807922037095252,0.0,0.0,0.0,7.0,144.0,0.05658597925180761,0.0,172.0,0.0,10.0,38.0,0.05658597925180761,0.0,79.0,0.0,0.0,0.0,0.0,10.0,17.0,0.06601697579377554,14.0,55.0,0.06601697579377554,14.0,32.0,0.05910091166299906,51.0,48.0,0.40144608613643507,0.0,25.0,0.0,0.0,0.0,0.0,17.0,53.0,0.10311222885884942,0.0,16.0,0.0,"ma, w"
+207,0.0,0.0,0.0,0.0,4.0,0.0,28.0,14.0,0.23552123552123552,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,15.0,0.007722007722007722,0.0,32.0,0.0,7.0,13.0,0.05405405405405405,0.0,4.0,0.0,0.0,0.0,0.0,8.0,0.0,0.09266409266409267,0.0,18.0,0.0,53.0,38.0,0.4092664092664092,5.0,386.0,0.03861003861003861,16.0,21.0,0.13127413127413126,0.0,0.0,0.0,0.0,16.0,0.0,4.0,9.0,0.03088803088803089,"rozovskaya, a"
+208,0.0,0.0,0.0,0.0,119.0,0.0,5.0,35.0,0.012864493996569469,0.0,0.0,0.0,0.0,0.0,0.0,1.0,444.0,0.002572898799313894,0.0,0.0,0.0,0.0,169.0,0.0,12.0,493.0,0.030017152658662095,53.0,29.0,0.13636363636363638,0.0,243.0,0.0,0.0,0.0,0.0,35.0,49.0,0.08662092624356775,41.0,158.0,0.09090909090909093,1.0,22.0,0.002572898799313894,39.0,154.0,0.08027444253859349,112.0,22.0,0.23413379073756435,0.0,0.0,0.0,110.0,144.0,0.26861063464837054,27.0,71.0,0.05506003430531733,"fung, p"
+209,0.0,0.0,0.0,6.0,562.0,0.00749217335462062,72.0,119.0,0.10351686184967489,0.0,0.0,0.0,0.0,0.0,0.0,33.0,342.0,0.03340260620601693,0.0,0.0,0.0,12.0,429.0,0.014172694595824004,8.0,364.0,0.009989564472827492,52.0,81.0,0.07716938555259237,25.0,364.0,0.019666955055879128,0.0,0.0,0.0,145.0,257.0,0.15795998822658472,49.0,271.0,0.05144625703506159,5.0,50.0,0.005868869127786152,71.0,153.0,0.09201102419793608,8.0,168.0,0.008366260245993025,0.0,0.0,0.0,349.0,639.0,0.40272215631884545,16.0,126.0,0.016215203760357483,"zhou, m"
+210,0.0,0.0,0.0,21.0,954.0,0.030492588196858083,1.0,233.0,0.0011628529397106895,0.0,0.0,0.0,0.0,0.0,0.0,32.0,1060.0,0.03364521172229595,0.0,0.0,0.0,13.0,397.0,0.018915741152627216,8.0,1047.0,0.006977117638264137,42.0,151.0,0.050648705818510026,95.0,553.0,0.1262858292525809,0.0,0.0,0.0,56.0,281.0,0.09372594694068159,22.0,394.0,0.03411035289818023,14.0,150.0,0.018657329388247063,198.0,532.0,0.28390223913793833,56.0,416.0,0.10039297046168953,0.0,0.0,0.0,164.0,640.0,0.18069442624282214,20.0,290.0,0.020388688209594087,"chen, y"
+211,0.0,0.0,0.0,8.0,13.0,0.03285356695869837,19.0,14.0,0.060231539424280355,0.0,0.0,0.0,0.0,0.0,0.0,44.0,26.0,0.14791927409261577,0.0,0.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,17.0,99.0,0.04458698372966208,0.0,2.0,0.0,0.0,0.0,0.0,27.0,13.0,0.10540519399249061,9.0,4.0,0.02855131414267835,39.0,49.0,0.14099655819774717,6.0,12.0,0.01877346683354193,1.0,1.0,0.004106695869837296,0.0,0.0,0.0,108.0,111.0,0.40836201501877345,3.0,2.0,0.008213391739674593,"boitet, c"
+212,0.0,0.0,0.0,0.0,12.0,0.0,9.0,0.0,0.09060402684563759,0.0,0.0,0.0,0.0,0.0,0.0,0.0,19.0,0.0,0.0,0.0,0.0,8.0,9.0,0.06711409395973154,3.0,87.0,0.020134228187919465,14.0,29.0,0.10234899328859062,0.0,35.0,0.0,0.0,0.0,0.0,68.0,180.0,0.44966442953020136,3.0,34.0,0.026845637583892617,9.0,17.0,0.06040268456375839,9.0,5.0,0.09060402684563759,4.0,6.0,0.031879194630872486,0.0,0.0,0.0,2.0,15.0,0.010067114093959733,5.0,10.0,0.05033557046979866,"stevenson, m"
+213,0.0,0.0,0.0,4.0,487.0,0.005798346397508859,188.0,123.0,0.43841941372275317,0.0,0.0,0.0,0.0,0.0,0.0,15.0,515.0,0.021582733812949645,0.0,0.0,0.0,1.0,400.0,0.0016106517770857944,0.0,594.0,0.0,0.0,84.0,0.0,0.0,291.0,0.0,0.0,0.0,0.0,37.0,251.0,0.07570063352303233,12.0,195.0,0.019327821325029533,30.0,138.0,0.04252120691506497,65.0,195.0,0.11242349404058843,20.0,263.0,0.03221303554171589,0.0,0.0,0.0,116.0,221.0,0.21883388811338994,19.0,121.0,0.03156877483088157,"zhang, h"
+214,0.0,0.0,0.0,0.0,73.0,0.0,421.0,188.0,0.6979250990584124,0.0,0.0,0.0,0.0,0.0,0.0,0.0,72.0,0.0,0.0,0.0,0.0,14.0,5.0,0.022368028631076647,17.0,29.0,0.019598653657705252,13.0,82.0,0.023007115163393124,0.0,13.0,0.0,0.0,0.0,0.0,9.0,120.0,0.01597716330791189,1.0,42.0,0.00042605768821098373,3.0,20.0,0.002811980742192493,29.0,12.0,0.0549614417792169,30.0,179.0,0.0419240765199608,0.0,0.0,0.0,18.0,31.0,0.027054663201397468,49.0,64.0,0.09394572025052192,"clark, s"
+215,0.0,0.0,0.0,0.0,172.0,0.0,6.0,110.0,0.03508771929824561,0.0,0.0,0.0,0.0,0.0,0.0,3.0,36.0,0.021052631578947368,0.0,0.0,0.0,31.0,8.0,0.1649122807017544,0.0,113.0,0.0,23.0,42.0,0.08070175438596491,0.0,61.0,0.0,0.0,0.0,0.0,33.0,67.0,0.14035087719298245,2.0,14.0,0.007017543859649123,4.0,170.0,0.02456140350877193,1.0,93.0,0.007017543859649123,5.0,31.0,0.017543859649122806,0.0,0.0,0.0,74.0,609.0,0.48771929824561405,2.0,13.0,0.014035087719298246,"tiedemann, j"
+216,0.0,0.0,0.0,6.0,57.0,0.010821446138711265,49.0,83.0,0.4184292506968356,0.0,0.0,0.0,0.0,0.0,0.0,7.0,38.0,0.029021151008362025,0.0,0.0,0.0,5.0,52.0,0.05410723069355632,0.0,86.0,0.0,15.0,12.0,0.040006558452205285,0.0,20.0,0.0,0.0,0.0,0.0,14.0,32.0,0.12625020495163142,0.0,12.0,0.0,9.0,103.0,0.04000655845220528,0.0,23.0,0.0,26.0,44.0,0.28135759960649287,0.0,0.0,0.0,0.0,3.0,0.0,0.0,10.0,0.0,"neumann, g"
+217,0.0,0.0,0.0,1.0,50.0,0.004486987735566856,4.0,134.0,0.017947950942267425,0.0,0.0,0.0,0.0,0.0,0.0,18.0,90.0,0.07179180376906968,0.0,0.0,0.0,1.0,35.0,0.004486987735566856,0.0,21.0,0.0,37.0,32.0,0.12862698175291654,37.0,121.0,0.13610529464552795,0.0,0.0,0.0,44.0,20.0,0.1645228836374514,20.0,16.0,0.087047562069997,23.0,38.0,0.09482500747831289,37.0,223.0,0.12264433143882739,10.0,15.0,0.03888722704157942,0.0,0.0,0.0,25.0,22.0,0.08226144181872569,11.0,16.0,0.04636553993419084,"lee, l"
+218,0.0,0.0,0.0,9.0,0.0,0.03199017675380489,8.0,3.0,0.04342908844152907,0.0,0.0,0.0,0.0,0.0,0.0,48.0,1.0,0.352893656897276,0.0,0.0,0.0,2.0,9.0,0.02261931689662972,0.0,7.0,0.0,15.0,22.0,0.15607328658674507,12.0,20.0,0.16285908165573398,0.0,0.0,0.0,6.0,1.0,0.06333408731056321,8.0,16.0,0.07509613209681067,10.0,10.0,0.08782757617862798,0.0,8.0,0.0,0.0,15.0,0.0,0.0,0.0,0.0,1.0,4.0,0.0038775971822793804,0.0,0.0,0.0,"evans, r"
+219,0.0,0.0,0.0,3.0,27.0,0.005648720211827008,10.0,5.0,0.018358340688437776,0.0,0.0,0.0,0.0,0.0,0.0,339.0,828.0,0.589585172109444,0.0,0.0,0.0,3.0,29.0,0.006354810238305384,2.0,29.0,0.004236540158870256,9.0,4.0,0.01906443071491615,0.0,16.0,0.0,0.0,0.0,0.0,1.0,14.0,0.002118270079435128,11.0,63.0,0.01729920564872021,12.0,10.0,0.016593115622241833,38.0,21.0,0.0736098852603707,4.0,13.0,0.006354810238305384,0.0,0.0,0.0,10.0,18.0,0.016946160635481024,125.0,166.0,0.2238305383936452,"lemon, o"
+220,0.0,0.0,0.0,1.0,0.0,0.002900953170327393,1.0,0.0,0.005801906340654786,0.0,0.0,0.0,0.0,0.0,0.0,76.0,1.0,0.32200580190634065,0.0,0.0,0.0,2.0,9.0,0.011603812681309573,0.0,17.0,0.0,35.0,24.0,0.20306672192291753,26.0,32.0,0.15084956485702444,0.0,0.0,0.0,10.0,14.0,0.05028318828567481,9.0,83.0,0.04931620389556569,0.0,5.0,0.0,19.0,2.0,0.06976101671501589,3.0,5.0,0.013537781461527834,0.0,0.0,0.0,14.0,5.0,0.04641525072523829,19.0,3.0,0.07445779803840309,"araki, k"
+221,0.0,0.0,0.0,0.0,295.0,0.0,4.0,88.0,0.004680778192347809,0.0,0.0,0.0,0.0,0.0,0.0,45.0,123.0,0.052116469385531085,0.0,0.0,0.0,74.0,223.0,0.0916747533769583,3.0,158.0,0.004566612870583227,11.0,92.0,0.014099417237925715,84.0,175.0,0.14213582559690296,0.0,0.0,0.0,75.0,110.0,0.0910468441072531,169.0,260.0,0.24028686631759746,46.0,119.0,0.04225154771850982,5.0,37.0,0.005537018105582163,88.0,421.0,0.1034337815187101,0.0,0.0,0.0,24.0,43.0,0.024078504226711567,139.0,102.0,0.18409158134538636,"hovy, e"
+222,0.0,0.0,0.0,0.0,80.0,0.0,9.0,35.0,0.06128430588862244,0.0,0.0,0.0,0.0,0.0,0.0,7.0,88.0,0.02691180389022116,0.0,0.0,0.0,13.0,79.0,0.030508926192379427,1.0,60.0,0.0015987210231814548,63.0,147.0,0.15760724753530506,0.0,29.0,0.0,0.0,0.0,0.0,95.0,121.0,0.27831068478550486,3.0,216.0,0.010658140154543031,19.0,248.0,0.07926991739941379,0.0,10.0,0.0,81.0,582.0,0.27871036504130026,0.0,0.0,0.0,6.0,20.0,0.009325872635225151,23.0,77.0,0.06581401545430321,"dagan, i"
+223,0.0,0.0,0.0,1.0,29.0,0.002283341982355994,199.0,168.0,0.5387303234734475,0.0,0.0,0.0,0.0,0.0,0.0,26.0,58.0,0.11568932710603702,0.0,0.0,0.0,0.0,8.0,0.0,1.0,17.0,0.002283341982355994,2.0,12.0,0.002283341982355994,0.0,12.0,0.0,0.0,0.0,0.0,0.0,12.0,0.0,5.0,25.0,0.008752810932364642,16.0,57.0,0.05936689154125584,54.0,141.0,0.11039612523784811,3.0,12.0,0.009133367929423975,0.0,0.0,0.0,11.0,56.0,0.021311191835322606,61.0,25.0,0.12976993599723233,"roark, b"
+224,0.0,0.0,0.0,0.0,84.0,0.0,25.0,39.0,0.09568684305907939,0.0,0.0,0.0,0.0,0.0,0.0,8.0,52.0,0.033707865168539325,0.0,0.0,0.0,2.0,4.0,0.009061254077564335,11.0,1.0,0.05980427691192461,9.0,12.0,0.03189561435302646,10.0,12.0,0.03624501631025734,0.0,0.0,0.0,20.0,26.0,0.10329829648423342,3.0,4.0,0.016310257339615802,55.0,20.0,0.20297209133744107,59.0,71.0,0.2678506705328017,0.0,15.0,0.0,0.0,0.0,0.0,34.0,9.0,0.11598405219282348,5.0,14.0,0.027183762232693004,"sproat, r"
+225,0.0,0.0,0.0,9.0,24.0,0.10344827586206896,0.0,43.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,15.0,0.03065134099616858,0.0,0.0,0.0,3.0,62.0,0.0210727969348659,0.0,46.0,0.0,9.0,35.0,0.0842911877394636,0.0,138.0,0.0,0.0,0.0,0.0,56.0,140.0,0.4463601532567049,8.0,85.0,0.09195402298850575,3.0,95.0,0.03065134099616858,0.0,120.0,0.0,21.0,76.0,0.1743295019157088,0.0,0.0,0.0,3.0,7.0,0.017241379310344827,0.0,26.0,0.0,"zesch, t"
+226,0.0,0.0,0.0,0.0,247.0,0.0,0.0,18.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,52.0,0.0,0.0,0.0,0.0,0.0,59.0,0.0,9.0,135.0,0.07219251336898397,9.0,22.0,0.12032085561497328,1.0,155.0,0.008021390374331552,0.0,0.0,0.0,27.0,48.0,0.2379679144385027,1.0,203.0,0.008021390374331552,0.0,58.0,0.0,20.0,120.0,0.1543162719633308,0.0,90.0,0.0,0.0,0.0,0.0,34.0,46.0,0.3269671504965623,9.0,75.0,0.07219251336898397,"liu, p"
+227,0.0,0.0,0.0,0.0,43.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,58.0,0.0365296803652968,0.0,0.0,0.0,9.0,48.0,0.1004566210045662,0.0,173.0,0.0,4.0,105.0,0.02968036529680365,0.0,40.0,0.0,0.0,0.0,0.0,7.0,160.0,0.0821917808219178,0.0,43.0,0.0,31.0,88.0,0.24429223744292236,0.0,3.0,0.0,56.0,73.0,0.4794520547945205,0.0,0.0,0.0,3.0,11.0,0.0273972602739726,0.0,5.0,0.0,"ponzetto, s"
+228,0.0,0.0,0.0,2.0,177.0,0.021341463414634144,42.0,2.0,0.3052591463414634,0.0,0.0,0.0,0.0,0.0,0.0,0.0,77.0,0.0,0.0,0.0,0.0,3.0,64.0,0.019207317073170735,0.0,140.0,0.0,16.0,9.0,0.11364329268292683,18.0,30.0,0.12804878048780488,0.0,0.0,0.0,17.0,97.0,0.11844512195121952,0.0,52.0,0.0,1.0,16.0,0.008003048780487805,3.0,38.0,0.022408536585365853,20.0,35.0,0.1259908536585366,0.0,0.0,0.0,1.0,164.0,0.006402439024390244,19.0,101.0,0.13125,"hu, y"
+229,0.0,0.0,0.0,35.0,936.0,0.10056386174314477,39.0,375.0,0.07566233407341366,0.0,0.0,0.0,0.0,0.0,0.0,57.0,628.0,0.10549268342541747,0.0,0.0,0.0,27.0,693.0,0.041418001555872275,1.0,774.0,0.0017489367259677353,27.0,122.0,0.048095759964112715,1.0,412.0,0.0014574472716397792,0.0,0.0,0.0,38.0,425.0,0.08628087848107494,7.0,278.0,0.013991493807741881,4.0,195.0,0.007432981085362875,48.0,385.0,0.10019287516490918,144.0,436.0,0.23373290102040228,0.0,0.0,0.0,44.0,418.0,0.0929851359306179,51.0,236.0,0.09094470975032223,"liu, t"
+230,0.0,0.0,0.0,2.0,37.0,0.024390243902439025,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,11.0,10.0,0.10162601626016259,0.0,0.0,0.0,3.0,10.0,0.036585365853658534,0.0,0.0,0.0,63.0,58.0,0.5934959349593496,0.0,12.0,0.0,0.0,0.0,0.0,11.0,12.0,0.1138211382113821,2.0,3.0,0.016260162601626015,5.0,27.0,0.05691056910569105,0.0,5.0,0.0,4.0,28.0,0.03252032520325203,0.0,0.0,0.0,0.0,1.0,0.0,3.0,0.0,0.024390243902439025,"lafourcade, m"
+231,0.0,0.0,0.0,0.0,67.0,0.0,7.0,10.0,0.04387504387504388,0.0,0.0,0.0,0.0,0.0,0.0,0.0,41.0,0.0,0.0,0.0,0.0,0.0,65.0,0.0,0.0,85.0,0.0,14.0,16.0,0.06212706212706213,0.0,83.0,0.0,0.0,0.0,0.0,11.0,121.0,0.08845208845208845,7.0,37.0,0.03474903474903475,8.0,6.0,0.08950508950508951,15.0,5.0,0.11232011232011231,26.0,43.0,0.22815022815022812,0.0,0.0,0.0,39.0,60.0,0.19866619866619867,13.0,26.0,0.14215514215514216,"zhao, s"
+232,0.0,0.0,0.0,35.0,542.0,0.04405607800570996,104.0,144.0,0.0789269654209955,0.0,0.0,0.0,0.0,0.0,0.0,44.0,452.0,0.05395273320989118,0.0,0.0,0.0,44.0,381.0,0.03977816898196711,1.0,415.0,0.0007661926609688689,53.0,48.0,0.05165415522698457,24.0,591.0,0.043098337179498875,0.0,0.0,0.0,86.0,274.0,0.09756186549670263,58.0,313.0,0.05382503443306304,12.0,88.0,0.011301341749290816,125.0,463.0,0.18420548557459887,154.0,485.0,0.1213594446927476,0.0,0.0,0.0,188.0,210.0,0.16619995804183046,53.0,370.0,0.053314239325750454,"li, s"
+233,0.0,0.0,0.0,0.0,20.0,0.0,5.0,21.0,0.02840909090909091,0.0,0.0,0.0,0.0,0.0,0.0,0.0,68.0,0.0,0.0,0.0,0.0,50.0,24.0,0.2840909090909091,0.0,1.0,0.0,0.0,31.0,0.0,84.0,197.0,0.4772727272727273,0.0,0.0,0.0,0.0,19.0,0.0,1.0,81.0,0.005681818181818183,12.0,50.0,0.06818181818181819,6.0,40.0,0.034090909090909095,4.0,53.0,0.02272727272727273,0.0,0.0,0.0,0.0,6.0,0.0,14.0,1.0,0.07954545454545454,"ku, l"
+234,0.0,0.0,0.0,0.0,125.0,0.0,22.0,38.0,0.035869565217391305,0.0,0.0,0.0,0.0,0.0,0.0,9.0,39.0,0.017934782608695653,0.0,0.0,0.0,24.0,10.0,0.04184782608695652,7.0,19.0,0.014673913043478263,21.0,10.0,0.04782608695652174,0.0,58.0,0.0,0.0,0.0,0.0,175.0,55.0,0.3554347826086957,39.0,30.0,0.08315217391304348,13.0,9.0,0.04782608695652174,45.0,35.0,0.10597826086956523,59.0,100.0,0.13260869565217392,0.0,0.0,0.0,1.0,5.0,0.002173913043478261,62.0,45.0,0.11467391304347829,"ji, d"
+235,0.0,0.0,0.0,0.0,76.0,0.0,84.0,51.0,0.10909555442003063,0.0,0.0,0.0,0.0,0.0,0.0,5.0,117.0,0.009439050701186622,0.0,0.0,0.0,0.0,84.0,0.0,7.0,80.0,0.013413387838528358,16.0,23.0,0.030304320672230732,4.0,86.0,0.007948674274683471,0.0,0.0,0.0,134.0,51.0,0.2481973542269914,5.0,89.0,0.006756373133480951,1.0,27.0,0.0011923011412025208,18.0,36.0,0.027820359961392146,207.0,157.0,0.3209845000851643,0.0,0.0,0.0,73.0,118.0,0.09111167887355928,84.0,36.0,0.1337364446715494,"tan, c"
+236,0.0,0.0,0.0,5.0,87.0,0.02702702702702703,7.0,2.0,0.036036036036036036,0.0,0.0,0.0,0.0,0.0,0.0,76.0,701.0,0.4558558558558558,0.0,0.0,0.0,4.0,20.0,0.043243243243243246,0.0,9.0,0.0,1.0,30.0,0.010810810810810811,0.0,3.0,0.0,0.0,0.0,0.0,1.0,27.0,0.0036036036036036032,3.0,31.0,0.010810810810810811,20.0,36.0,0.07567567567567568,27.0,57.0,0.13153153153153152,32.0,105.0,0.20180180180180182,0.0,0.0,0.0,0.0,9.0,0.0,1.0,7.0,0.0036036036036036032,"schlangen, d"
+237,0.0,0.0,0.0,2.0,758.0,0.0057821554173289455,76.0,155.0,0.19295878649829162,0.0,0.0,0.0,0.0,0.0,0.0,17.0,726.0,0.08962340896859865,0.0,0.0,0.0,1.0,545.0,0.0028910777086644727,10.0,913.0,0.03392197844832981,18.0,73.0,0.05203939875596051,2.0,444.0,0.0057821554173289455,0.0,0.0,0.0,23.0,215.0,0.08095017584260523,0.0,189.0,0.0,34.0,232.0,0.09829664209459207,73.0,363.0,0.22550406127582887,40.0,386.0,0.09003642006983643,0.0,0.0,0.0,38.0,314.0,0.052827874494687184,22.0,351.0,0.06938586500794733,"li, y"
+238,0.0,0.0,0.0,2.0,33.0,0.009081277433025578,67.0,22.0,0.22703193582563944,0.0,0.0,0.0,0.0,0.0,0.0,20.0,21.0,0.06281216891176024,0.0,0.0,0.0,0.0,5.0,0.0,0.0,3.0,0.0,21.0,37.0,0.056757983956409866,0.0,164.0,0.0,0.0,0.0,0.0,65.0,27.0,0.18692296049644314,21.0,58.0,0.05827153019524745,6.0,53.0,0.022703193582563945,30.0,94.0,0.08551536249432419,43.0,6.0,0.12668382019070681,0.0,0.0,0.0,18.0,88.0,0.04767670652338429,39.0,5.0,0.11654306039049493,"van den bosch, a"
+239,0.0,0.0,0.0,2.0,17.0,0.010573618821041502,0.0,18.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,24.0,59.0,0.08353158868622787,0.0,0.0,0.0,15.0,2.0,0.11419508326724824,1.0,18.0,0.005286809410520751,21.0,11.0,0.11102299762093579,1.0,122.0,0.003965107057890564,0.0,0.0,0.0,2.0,28.0,0.010573618821041502,3.0,9.0,0.01145475372279496,34.0,13.0,0.14670896114195087,35.0,54.0,0.18151378976121244,21.0,29.0,0.13877874702616974,0.0,0.0,0.0,19.0,9.0,0.10044937879989427,16.0,3.0,0.08194554586307164,"narayanan, s"
+240,0.0,0.0,0.0,2.0,216.0,0.003440975538909278,17.0,40.0,0.04301219423636597,0.0,0.0,0.0,0.0,0.0,0.0,24.0,217.0,0.06439539937101649,0.0,0.0,0.0,19.0,56.0,0.05548573056491211,5.0,193.0,0.010753048559091493,27.0,37.0,0.06004762995361759,33.0,157.0,0.11183170501455153,0.0,0.0,0.0,60.0,89.0,0.16416320800213013,20.0,132.0,0.04860377948709355,49.0,92.0,0.06954941468037842,100.0,50.0,0.1987368193692662,27.0,49.0,0.07025325058606442,0.0,0.0,0.0,24.0,292.0,0.04854233349532731,26.0,122.0,0.05118451114127552,"yu, h"
+241,0.0,0.0,0.0,0.0,1.0,0.0,21.0,2.0,0.04570575056011949,0.0,0.0,0.0,0.0,0.0,0.0,8.0,7.0,0.022404779686333084,0.0,0.0,0.0,1.0,3.0,0.004480955937266617,1.0,4.0,0.004480955937266617,99.0,71.0,0.2840926064227035,14.0,20.0,0.02598954443614638,0.0,0.0,0.0,30.0,14.0,0.12322628827483197,20.0,5.0,0.050784167289021666,13.0,7.0,0.04480955937266617,74.0,29.0,0.2025392083644511,20.0,11.0,0.042120985810306204,0.0,0.0,0.0,62.0,69.0,0.14936519790888722,0.0,0.0,0.0,"kwong, o"
+242,0.0,0.0,0.0,0.0,138.0,0.0,2.0,49.0,0.01083130246412131,0.0,0.0,0.0,0.0,0.0,0.0,6.0,64.0,0.018954779312212292,0.0,0.0,0.0,1.0,116.0,0.005415651232060655,0.0,148.0,0.0,22.0,49.0,0.1232060655293799,0.0,99.0,0.0,0.0,0.0,0.0,51.0,178.0,0.18413214189006227,5.0,8.0,0.031139994584348768,9.0,45.0,0.03411860276198213,33.0,65.0,0.16978066612510154,12.0,186.0,0.08394259409694015,0.0,0.0,0.0,45.0,4.0,0.1868399675060926,28.0,69.0,0.15163823449769834,"sun, l"
+243,0.0,0.0,0.0,0.0,143.0,0.0,9.0,73.0,0.04261572373254959,0.0,0.0,0.0,0.0,0.0,0.0,9.0,75.0,0.03379867744305657,0.0,0.0,0.0,2.0,54.0,0.011756061719324026,0.0,531.0,0.0,149.0,385.0,0.639235855988244,0.0,21.0,0.0,0.0,0.0,0.0,4.0,244.0,0.01763409257898604,8.0,60.0,0.03747244673034533,22.0,119.0,0.07200587803085966,8.0,14.0,0.03820720058780308,15.0,163.0,0.05731080088170462,0.0,0.0,0.0,10.0,71.0,0.032329169728141066,4.0,77.0,0.01763409257898604,"korhonen, a"
+244,0.0,0.0,0.0,0.0,38.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,5.0,0.03428571428571429,0.0,0.0,0.0,15.0,59.0,0.11714285714285715,0.0,9.0,0.0,8.0,3.0,0.06857142857142857,0.0,2.0,0.0,0.0,0.0,0.0,36.0,52.0,0.29428571428571426,16.0,171.0,0.11714285714285715,14.0,0.0,0.09714285714285714,0.0,4.0,0.0,14.0,54.0,0.11714285714285715,0.0,0.0,0.0,2.0,1.0,0.017142857142857144,22.0,26.0,0.13714285714285715,"chali, y"
+245,0.0,0.0,0.0,0.0,186.0,0.0,0.0,78.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,205.0,0.020066889632107024,0.0,0.0,0.0,15.0,114.0,0.137123745819398,0.0,419.0,0.0,8.0,23.0,0.0802675585284281,0.0,71.0,0.0,0.0,0.0,0.0,24.0,55.0,0.22408026755852845,16.0,133.0,0.137123745819398,13.0,28.0,0.10367892976588629,0.0,49.0,0.0,14.0,200.0,0.137123745819398,0.0,0.0,0.0,2.0,133.0,0.020066889632107024,20.0,120.0,0.14046822742474918,"joty, s"
+246,0.0,0.0,0.0,2.0,67.0,0.021337126600284494,6.0,3.0,0.04800853485064011,0.0,0.0,0.0,0.0,0.0,0.0,9.0,19.0,0.059032716927453766,0.0,0.0,0.0,20.0,30.0,0.13940256045519203,0.0,11.0,0.0,8.0,35.0,0.03520625889046942,0.0,70.0,0.0,0.0,0.0,0.0,12.0,35.0,0.06721194879089616,14.0,41.0,0.0995732574679943,23.0,26.0,0.18029871977240397,1.0,23.0,0.005334281650071123,42.0,27.0,0.33499288762446655,0.0,0.0,0.0,2.0,99.0,0.009601706970128023,0.0,1.0,0.0,"mitkov, r"
+247,0.0,0.0,0.0,0.0,44.0,0.0,5.0,15.0,0.022798176145908327,0.0,0.0,0.0,0.0,0.0,0.0,11.0,40.0,0.05279577633789297,0.0,0.0,0.0,98.0,215.0,0.4871610271178305,0.0,24.0,0.0,0.0,26.0,0.0,3.0,8.0,0.02159827213822894,0.0,0.0,0.0,18.0,40.0,0.10319174466042716,2.0,77.0,0.010079193664506839,19.0,29.0,0.09719222462203024,20.0,6.0,0.09599232061435084,15.0,32.0,0.04463642908567315,0.0,0.0,0.0,6.0,6.0,0.021358291336693064,9.0,15.0,0.04319654427645788,"nyberg, e"
+248,0.0,0.0,0.0,3.0,611.0,0.009978380176284716,9.0,95.0,0.03858307001496757,0.0,0.0,0.0,0.0,0.0,0.0,2.0,428.0,0.0066522534508564775,0.0,0.0,0.0,12.0,236.0,0.02993514052885415,0.0,439.0,0.0,9.0,54.0,0.061865957092965246,0.0,249.0,0.0,0.0,0.0,0.0,21.0,186.0,0.08115749210044902,0.0,264.0,0.0,5.0,75.0,0.014967570264427075,69.0,155.0,0.1979045401629802,0.0,242.0,0.0,0.0,0.0,0.0,178.0,505.0,0.5140528854149343,11.0,162.0,0.044902710793281224,"xu, j"
+249,0.0,0.0,0.0,0.0,26.0,0.0,34.0,2.0,0.1023391812865497,0.0,0.0,0.0,0.0,0.0,0.0,16.0,39.0,0.04093567251461988,0.0,0.0,0.0,20.0,16.0,0.05360623781676413,6.0,7.0,0.02046783625730994,11.0,8.0,0.029239766081871343,31.0,191.0,0.07992202729044834,0.0,0.0,0.0,18.0,14.0,0.038011695906432746,14.0,14.0,0.020955165692007796,7.0,56.0,0.02046783625730994,7.0,100.0,0.016569200779727095,92.0,351.0,0.3961988304093567,0.0,0.0,0.0,5.0,2.0,0.008771929824561403,57.0,64.0,0.17251461988304093,"ng, v"
+250,0.0,0.0,0.0,0.0,191.0,0.0,29.0,83.0,0.18449197860962568,0.0,0.0,0.0,0.0,0.0,0.0,4.0,78.0,0.0213903743315508,0.0,0.0,0.0,32.0,129.0,0.16844919786096257,28.0,149.0,0.18716577540106952,0.0,6.0,0.0,0.0,40.0,0.0,0.0,0.0,0.0,3.0,70.0,0.016042780748663103,1.0,41.0,0.0053475935828877,6.0,34.0,0.0374331550802139,8.0,52.0,0.06417112299465241,27.0,63.0,0.15106951871657753,0.0,0.0,0.0,0.0,97.0,0.0,23.0,124.0,0.1644385026737968,"huang, z"
+251,0.0,0.0,0.0,0.0,6.0,0.0,0.0,32.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,12.0,29.0,0.04995836802664447,0.0,0.0,0.0,39.0,34.0,0.13322231473771856,0.0,38.0,0.0,82.0,102.0,0.3880099916736053,2.0,69.0,0.009991673605328892,0.0,0.0,0.0,69.0,91.0,0.29059117402164863,2.0,34.0,0.00832639467110741,8.0,25.0,0.03164029975020816,4.0,85.0,0.019983347210657785,15.0,121.0,0.05745212323064113,0.0,0.0,0.0,4.0,7.0,0.010824313072439633,0.0,33.0,0.0,"hirst, g"
+252,0.0,0.0,0.0,4.0,151.0,0.012066365007541475,25.0,95.0,0.08295625942684764,0.0,0.0,0.0,0.0,0.0,0.0,12.0,67.0,0.03619909502262443,0.0,0.0,0.0,18.0,72.0,0.05429864253393664,2.0,57.0,0.006033182503770738,27.0,93.0,0.0965309200603318,22.0,97.0,0.05656108597285067,0.0,0.0,0.0,27.0,62.0,0.10799396681749622,95.0,177.0,0.36108597285067867,9.0,22.0,0.02639517345399698,1.0,43.0,0.004524886877828054,18.0,79.0,0.055806938159879325,0.0,0.0,0.0,2.0,99.0,0.009049773755656108,29.0,34.0,0.09049773755656107,"okumura, m"
+253,0.0,0.0,0.0,0.0,23.0,0.0,1.0,3.0,0.007451564828614008,0.0,0.0,0.0,0.0,0.0,0.0,0.0,17.0,0.0,0.0,0.0,0.0,23.0,28.0,0.1213540557802853,1.0,0.0,0.0031935277836917177,31.0,47.0,0.22301469022780498,53.0,180.0,0.26751117734724295,0.0,0.0,0.0,38.0,42.0,0.24105812220566314,10.0,10.0,0.04657228017883755,9.0,60.0,0.04247391952309985,0.0,1.0,0.0,1.0,60.0,0.007451564828614008,0.0,0.0,0.0,0.0,3.0,0.0,5.0,15.0,0.039919097296146476,"montoyo, a"
+254,0.0,0.0,0.0,7.0,797.0,0.0075008320728219315,101.0,395.0,0.17314420701430625,0.0,0.0,0.0,0.0,0.0,0.0,8.0,446.0,0.008572379511796494,0.0,0.0,0.0,5.0,321.0,0.007232945213078291,0.0,697.0,0.0,15.0,78.0,0.016073211584618424,0.0,104.0,0.0,0.0,0.0,0.0,6.0,183.0,0.016073211584618424,8.0,211.0,0.009376040091027413,39.0,143.0,0.06161397774103729,94.0,142.0,0.15555296989114054,12.0,197.0,0.0167875765439348,0.0,0.0,0.0,362.0,1525.0,0.49199721830614324,23.0,185.0,0.036075430445476904,"liu, q"
+255,0.0,0.0,0.0,0.0,60.0,0.0,7.0,19.0,0.0516795865633075,0.0,0.0,0.0,0.0,0.0,0.0,2.0,127.0,0.015503875968992248,0.0,0.0,0.0,0.0,108.0,0.0,0.0,57.0,0.0,2.0,25.0,0.015503875968992248,0.0,82.0,0.0,0.0,0.0,0.0,37.0,95.0,0.2868217054263566,27.0,77.0,0.20930232558139536,2.0,53.0,0.010335917312661497,0.0,25.0,0.0,7.0,89.0,0.05426356589147287,0.0,0.0,0.0,33.0,150.0,0.22222222222222224,18.0,68.0,0.1343669250645995,"daum{\'e} iii, h"
+256,0.0,0.0,0.0,0.0,669.0,0.0,7.0,310.0,0.01780264496439471,0.0,0.0,0.0,0.0,0.0,0.0,10.0,634.0,0.057364078218605175,0.0,0.0,0.0,7.0,641.0,0.014581213970837574,1.0,617.0,0.005934214988131571,26.0,38.0,0.06993896236012208,15.0,359.0,0.08307900983384199,0.0,0.0,0.0,5.0,238.0,0.017520063298293205,3.0,278.0,0.011868429976263141,0.0,103.0,0.0,109.0,224.0,0.3958121397083757,15.0,280.0,0.037385554425228895,0.0,0.0,0.0,36.0,314.0,0.13244602690177462,33.0,297.0,0.15626766135413134,"wang, z"
+257,0.0,0.0,0.0,4.0,44.0,0.01664816870144284,5.0,2.0,0.012486126526082131,0.0,0.0,0.0,0.0,0.0,0.0,11.0,61.0,0.04328523862375139,0.0,0.0,0.0,0.0,10.0,0.0,0.0,28.0,0.0,0.0,140.0,0.0,0.0,170.0,0.0,0.0,0.0,0.0,27.0,32.0,0.1838235294117647,59.0,206.0,0.29994450610432855,14.0,82.0,0.05674250832408436,2.0,12.0,0.013873473917869035,34.0,30.0,0.3440621531631521,0.0,0.0,0.0,5.0,36.0,0.029134295227524976,0.0,4.0,0.0,"saggion, h"
+258,0.0,0.0,0.0,4.0,453.0,0.0058606341206118495,21.0,93.0,0.04922932661313954,0.0,0.0,0.0,0.0,0.0,0.0,81.0,524.0,0.06997597140010549,0.0,0.0,0.0,66.0,308.0,0.07736037039207642,4.0,392.0,0.004395475590458888,23.0,119.0,0.032819551075426355,104.0,530.0,0.12952001406552188,0.0,0.0,0.0,145.0,178.0,0.17253706851081288,25.0,142.0,0.03692199495985466,27.0,117.0,0.029889234015120437,128.0,577.0,0.25347242571646256,42.0,297.0,0.043368692492527686,0.0,0.0,0.0,33.0,216.0,0.05040145343726191,43.0,189.0,0.04424778761061947,"chen, h"
+259,0.0,0.0,0.0,6.0,10.0,0.06905812392096682,2.0,0.0,0.007673124880107424,0.0,0.0,0.0,0.0,0.0,0.0,16.0,3.0,0.11241127949357375,0.0,0.0,0.0,1.0,1.0,0.0023019374640322276,0.0,4.0,0.0,3.0,15.0,0.015346249760214849,0.0,0.0,0.0,0.0,0.0,0.0,43.0,18.0,0.4565509303663917,6.0,15.0,0.030692499520429697,5.0,10.0,0.057548436600805684,0.0,22.0,0.0,2.0,2.0,0.007673124880107424,0.0,0.0,0.0,41.0,242.0,0.22923460579320928,1.0,1.0,0.011509687320161137,"max, a"
+260,0.0,0.0,0.0,3.0,93.0,0.016981132075471698,75.0,182.0,0.5207547169811321,0.0,0.0,0.0,0.0,0.0,0.0,10.0,151.0,0.0509433962264151,0.0,0.0,0.0,1.0,75.0,0.0056603773584905665,0.0,20.0,0.0,37.0,34.0,0.1471698113207547,3.0,12.0,0.022641509433962266,0.0,0.0,0.0,0.0,14.0,0.0,1.0,85.0,0.0056603773584905665,1.0,24.0,0.0056603773584905665,0.0,28.0,0.0,40.0,53.0,0.22452830188679246,0.0,0.0,0.0,0.0,26.0,0.0,0.0,4.0,0.0,"gardent, c"
+261,0.0,0.0,0.0,0.0,0.0,0.0,88.0,64.0,0.8176670441676104,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,21.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,3.0,5.0,0.03963759909399773,0.0,0.0,0.0,10.0,6.0,0.13703284258210646,0.0,0.0,0.0,1.0,0.0,0.0056625141562853904,0.0,0.0,0.0,"parmentier, y"
+262,0.0,0.0,0.0,0.0,0.0,0.0,16.0,0.0,0.2080200501253133,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,1.0,17.0,0.015037593984962405,59.0,3.0,0.3558897243107769,0.0,2.0,0.0,0.0,0.0,0.0,23.0,51.0,0.16666666666666666,0.0,2.0,0.0,6.0,8.0,0.09022556390977443,1.0,4.0,0.0037593984962406013,22.0,7.0,0.15037593984962405,0.0,0.0,0.0,1.0,8.0,0.005012531328320802,1.0,5.0,0.005012531328320802,"preiss, j"
+263,0.0,0.0,0.0,10.0,19.0,0.022209234365867917,11.0,104.0,0.04558737580362362,0.0,0.0,0.0,0.0,0.0,0.0,49.0,32.0,0.16656925774400938,0.0,0.0,0.0,5.0,5.0,0.031560490940970194,0.0,40.0,0.0,2.0,21.0,0.0058445353594389245,0.0,5.0,0.0,0.0,0.0,0.0,21.0,46.0,0.06428988895382817,1.0,4.0,0.003506721215663355,181.0,82.0,0.6183518410286383,1.0,99.0,0.003506721215663355,7.0,5.0,0.017533606078316777,0.0,0.0,0.0,4.0,80.0,0.021040327293980133,0.0,7.0,0.0,"bird, s"
+264,0.0,0.0,0.0,7.0,223.0,0.014126289023873428,20.0,29.0,0.1356123746291849,0.0,0.0,0.0,0.0,0.0,0.0,23.0,219.0,0.05452747563215143,0.0,0.0,0.0,15.0,176.0,0.04237886707162029,1.0,338.0,0.004237886707162028,59.0,15.0,0.16386495267693177,0.0,121.0,0.0,0.0,0.0,0.0,59.0,42.0,0.16061590620144087,3.0,121.0,0.005085464048594435,77.0,88.0,0.15877948862833732,32.0,118.0,0.10086170363045628,22.0,59.0,0.14408814804350897,0.0,0.0,0.0,3.0,78.0,0.007345670292414183,2.0,55.0,0.008475773414324057,"lee, h"
+265,0.0,0.0,0.0,0.0,109.0,0.0,1.0,21.0,0.006888633754305395,0.0,0.0,0.0,0.0,0.0,0.0,12.0,84.0,0.09644087256027553,0.0,0.0,0.0,2.0,69.0,0.018369690011481053,2.0,68.0,0.01377726750861079,2.0,12.0,0.011481056257175657,2.0,96.0,0.01377726750861079,0.0,0.0,0.0,2.0,37.0,0.011481056257175657,59.0,53.0,0.3283582089552239,18.0,8.0,0.11940298507462684,50.0,53.0,0.3444316877152698,1.0,71.0,0.0034443168771526975,0.0,0.0,0.0,7.0,184.0,0.025258323765786447,1.0,55.0,0.006888633754305395,"zhou, l"
+266,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,0.05911330049261084,0.0,0.0,0.0,0.0,0.0,0.0,1.0,4.0,0.004547176960970065,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,60.0,42.0,0.31337627889352027,0.0,0.0,0.0,0.0,0.0,0.0,78.0,9.0,0.5456612353164078,1.0,5.0,0.0022735884804850324,12.0,30.0,0.03296703296703297,3.0,1.0,0.0034103827207275484,0.0,4.0,0.0,0.0,0.0,0.0,9.0,4.0,0.03865100416824555,0.0,0.0,0.0,"kilgarriff, a"
+267,0.0,0.0,0.0,3.0,16.0,0.0121654501216545,2.0,1.0,0.024330900243309,0.0,0.0,0.0,0.0,0.0,0.0,6.0,14.0,0.024330900243309,0.0,0.0,0.0,5.0,3.0,0.0608272506082725,0.0,18.0,0.0,0.0,6.0,0.0,0.0,12.0,0.0,0.0,0.0,0.0,2.0,28.0,0.016220600162206,1.0,30.0,0.0121654501216545,15.0,68.0,0.1727493917274939,0.0,25.0,0.0,0.0,14.0,0.0,0.0,0.0,0.0,66.0,220.0,0.6326034063260341,9.0,0.0,0.0446066504460665,"germann, u"
+268,0.0,0.0,0.0,1.0,644.0,0.005628517823639774,7.0,149.0,0.02814258911819887,0.0,0.0,0.0,0.0,0.0,0.0,8.0,380.0,0.030018761726078796,0.0,0.0,0.0,4.0,565.0,0.015009380863039398,0.0,709.0,0.0,40.0,107.0,0.20450281425891176,49.0,333.0,0.1643527204502814,0.0,0.0,0.0,55.0,199.0,0.21651031894934333,4.0,286.0,0.00900562851782364,11.0,40.0,0.04127579737335834,43.0,280.0,0.15309568480300187,9.0,243.0,0.050656660412757966,0.0,0.0,0.0,17.0,226.0,0.06378986866791743,8.0,200.0,0.01801125703564728,"liu, j"
+269,0.0,0.0,0.0,0.0,109.0,0.0,86.0,191.0,0.4328289775332417,0.0,0.0,0.0,0.0,0.0,0.0,23.0,164.0,0.09903713892709766,0.0,0.0,0.0,2.0,18.0,0.011004126547455296,0.0,84.0,0.0,5.0,13.0,0.016047684548372305,0.0,17.0,0.0,0.0,0.0,0.0,7.0,2.0,0.04218248509857864,23.0,89.0,0.0972031178358551,8.0,23.0,0.06602475928473177,6.0,42.0,0.06052269601100413,9.0,92.0,0.058688674919761576,0.0,0.0,0.0,13.0,69.0,0.05593764328289776,11.0,4.0,0.06052269601100413,"white, m"
+270,0.0,0.0,0.0,0.0,21.0,0.0,2.0,16.0,0.006795969881987443,0.0,0.0,0.0,0.0,0.0,0.0,154.0,38.0,0.5629220513041789,0.0,0.0,0.0,0.0,13.0,0.0,0.0,4.0,0.0,17.0,2.0,0.09242519039502922,0.0,20.0,0.0,0.0,0.0,0.0,4.0,5.0,0.010873551811179908,14.0,104.0,0.050484347694763856,1.0,11.0,0.010873551811179908,24.0,14.0,0.0843994735820155,14.0,29.0,0.13229488036935558,0.0,0.0,0.0,9.0,3.0,0.04893098315030959,0.0,0.0,0.0,"stent, a"
+271,0.0,0.0,0.0,0.0,1.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,56.0,60.0,0.5476100151745069,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,8.0,22.0,0.1274658573596358,0.0,0.0,0.0,0.0,26.0,0.0,22.0,18.0,0.17526555386949924,4.0,8.0,0.018209408194233688,4.0,5.0,0.0637329286798179,4.0,8.0,0.03186646433990895,0.0,0.0,0.0,4.0,6.0,0.035849772382397574,0.0,1.0,0.0,"oberlander, j"
+272,0.0,0.0,0.0,1.0,6.0,0.006699846860643184,3.0,1.0,0.046898928024502295,0.0,0.0,0.0,0.0,0.0,0.0,24.0,3.0,0.1609877488514548,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,22.0,0.00861408882082695,0.0,1.0,0.0,0.0,0.0,0.0,16.0,15.0,0.14739663093415004,6.0,11.0,0.043070444104134756,13.0,8.0,0.10384762633996936,22.0,0.0,0.1280149310872894,2.0,1.0,0.010049770290964776,0.0,0.0,0.0,62.0,124.0,0.3444199846860643,0.0,11.0,0.0,"blanchon, h"
+273,0.0,0.0,0.0,0.0,0.0,0.0,19.0,4.0,0.01879480720790545,0.0,0.0,0.0,0.0,0.0,0.0,260.0,132.0,0.5855281368505552,0.0,0.0,0.0,5.0,1.0,0.011722534392559585,0.0,0.0,0.0,4.0,4.0,0.011722534392559585,1.0,29.0,0.002131369889556288,0.0,0.0,0.0,6.0,0.0,0.008719240457275726,63.0,5.0,0.13252553491848204,6.0,2.0,0.006850831787859497,51.0,17.0,0.1167686218064052,25.0,13.0,0.07140089130013565,0.0,0.0,0.0,4.0,3.0,0.008259058322030615,10.0,6.0,0.025576438674675458,"moore, j"
+274,0.0,0.0,0.0,0.0,6.0,0.0,5.0,3.0,0.03804089396100808,0.0,0.0,0.0,0.0,0.0,0.0,42.0,33.0,0.3680456490727531,0.0,0.0,0.0,0.0,6.0,0.0,0.0,10.0,0.0,0.0,9.0,0.0,10.0,43.0,0.09795530194959581,0.0,0.0,0.0,1.0,4.0,0.00951022349025202,0.0,14.0,0.0,4.0,32.0,0.022824536376604847,29.0,10.0,0.27104136947218255,2.0,21.0,0.01426533523537803,0.0,0.0,0.0,15.0,3.0,0.13552068473609127,6.0,0.0,0.04279600570613409,"b{\'e}chet, f"
+275,0.0,0.0,0.0,4.0,2.0,0.03389830508474577,4.0,10.0,0.030131826741996236,0.0,0.0,0.0,0.0,0.0,0.0,8.0,11.0,0.03578154425612053,0.0,0.0,0.0,0.0,1.0,0.0,0.0,27.0,0.0,6.0,12.0,0.04519774011299436,0.0,0.0,0.0,0.0,0.0,0.0,6.0,3.0,0.04519774011299436,1.0,37.0,0.007532956685499059,81.0,52.0,0.7344632768361583,0.0,1.0,0.0,4.0,1.0,0.037664783427495296,0.0,0.0,0.0,4.0,1.0,0.030131826741996236,0.0,0.0,0.0,"romary, l"
+276,0.0,0.0,0.0,0.0,3.0,0.0,9.0,24.0,0.08265392184425076,0.0,0.0,0.0,0.0,0.0,0.0,53.0,8.0,0.4191734607815573,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,66.0,0.007871802080404834,0.0,14.0,0.0,0.0,0.0,0.0,2.0,1.0,0.011807703120607251,1.0,8.0,0.007871802080404834,7.0,57.0,0.046443632274388524,21.0,23.0,0.15653640708462185,23.0,7.0,0.2322181613719426,0.0,0.0,0.0,3.0,1.0,0.035423109361821754,0.0,2.0,0.0,"antoine, j"
+277,0.0,0.0,0.0,6.0,132.0,0.03960826985854189,4.0,4.0,0.03351468988030468,0.0,0.0,0.0,0.0,0.0,0.0,1.0,136.0,0.006093579978237214,0.0,0.0,0.0,0.0,13.0,0.0,0.0,26.0,0.0,26.0,132.0,0.15495103373231775,0.0,5.0,0.0,0.0,0.0,0.0,47.0,85.0,0.32752992383025026,2.0,19.0,0.010663764961915125,12.0,13.0,0.09401523394994561,8.0,3.0,0.05179542981501633,30.0,288.0,0.22850924918389556,0.0,0.0,0.0,2.0,2.0,0.013710554951033732,6.0,18.0,0.03960826985854189,"baroni, m"
+278,0.0,0.0,0.0,4.0,12.0,0.02180232558139535,0.0,28.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,11.0,0.014534883720930232,0.0,0.0,0.0,0.0,3.0,0.0,0.0,6.0,0.0,15.0,32.0,0.11627906976744186,0.0,1.0,0.0,0.0,0.0,0.0,2.0,17.0,0.014534883720930232,6.0,10.0,0.0436046511627907,7.0,44.0,0.032703488372093026,12.0,57.0,0.06007751937984496,2.0,1.0,0.014534883720930232,0.0,0.0,0.0,120.0,267.0,0.6455910852713178,10.0,1.0,0.036337209302325583,"lepage, y"
+279,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.011673151750972765,0.0,0.0,0.0,14.0,17.0,0.17976653696498057,0.0,0.0,0.0,5.0,11.0,0.08054474708171208,0.0,0.0,0.0,0.0,0.0,0.0,56.0,102.0,0.4918287937743191,1.0,1.0,0.011673151750972765,2.0,17.0,0.02801556420233463,2.0,0.0,0.011673151750972765,0.0,27.0,0.0,0.0,0.0,0.0,11.0,0.0,0.0642023346303502,11.0,0.0,0.12062256809338523,"pa{\c{s}}ca, m"
+280,0.0,0.0,0.0,2.0,26.0,0.019230769230769232,19.0,21.0,0.13701923076923078,0.0,0.0,0.0,0.0,0.0,0.0,2.0,12.0,0.016826923076923076,0.0,0.0,0.0,1.0,16.0,0.007211538461538462,0.0,24.0,0.0,48.0,164.0,0.37740384615384615,0.0,9.0,0.0,0.0,0.0,0.0,1.0,102.0,0.009615384615384616,3.0,10.0,0.021634615384615388,22.0,29.0,0.16346153846153846,3.0,12.0,0.028846153846153848,13.0,229.0,0.15625,0.0,0.0,0.0,0.0,7.0,0.0,8.0,12.0,0.0625,"lenci, a"
+281,0.0,0.0,0.0,0.0,23.0,0.0,52.0,36.0,0.22055937280086002,0.0,0.0,0.0,0.0,0.0,0.0,45.0,3.0,0.16541952960064502,0.0,0.0,0.0,0.0,3.0,0.0,3.0,38.0,0.007001884850820953,22.0,77.0,0.05650132081009685,0.0,5.0,0.0,0.0,0.0,0.0,5.0,8.0,0.02042216414822778,9.0,4.0,0.02708102736746811,34.0,66.0,0.21399598982958545,22.0,31.0,0.08022993058232342,6.0,73.0,0.015656992513641298,0.0,0.0,0.0,41.0,55.0,0.1441185935405844,5.0,11.0,0.04901319395574667,"levin, l"
+282,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,23.0,57.0,0.31690140845070425,0.0,0.0,0.0,1.0,0.0,0.009389671361502348,2.0,4.0,0.028169014084507043,11.0,5.0,0.07042253521126761,2.0,27.0,0.018779342723004695,0.0,0.0,0.0,6.0,20.0,0.056338028169014086,33.0,50.0,0.252112676056338,3.0,20.0,0.016901408450704227,17.0,23.0,0.14084507042253522,10.0,19.0,0.08450704225352113,0.0,0.0,0.0,1.0,0.0,0.005633802816901409,0.0,16.0,0.0,"murray, g"
+283,0.0,0.0,0.0,2.0,378.0,0.0030526583566522513,14.0,150.0,0.0206054439074027,0.0,0.0,0.0,0.0,0.0,0.0,22.0,201.0,0.037140676672602395,0.0,0.0,0.0,5.0,80.0,0.007631645891630629,8.0,142.0,0.012210633426609005,86.0,62.0,0.1747646909183414,1.0,114.0,0.0015263291783261257,0.0,0.0,0.0,82.0,148.0,0.11803612312388705,125.0,472.0,0.22284406003561436,47.0,50.0,0.058763673365555845,4.0,25.0,0.006105316713304503,140.0,561.0,0.22335283642838974,0.0,0.0,0.0,47.0,74.0,0.0697023658102264,29.0,154.0,0.04426354617145765,"lapata, m"
+284,0.0,0.0,0.0,0.0,26.0,0.0,64.0,104.0,0.32053742802303264,0.0,0.0,0.0,0.0,0.0,0.0,12.0,112.0,0.046065259117082535,0.0,0.0,0.0,1.0,5.0,0.005758157389635317,0.0,17.0,0.0,7.0,38.0,0.028790786948176585,0.0,0.0,0.0,0.0,0.0,0.0,41.0,38.0,0.21305182341650672,16.0,91.0,0.07677543186180422,12.0,24.0,0.07293666026871401,10.0,23.0,0.07485604606525913,24.0,48.0,0.09213051823416507,0.0,0.0,0.0,0.0,4.0,0.0,6.0,20.0,0.0690978886756238,"keller, f"
+285,0.0,0.0,0.0,2.0,22.0,0.01305577718140277,31.0,42.0,0.17407702908537026,0.0,0.0,0.0,0.0,0.0,0.0,14.0,32.0,0.06963081163414811,0.0,0.0,0.0,12.0,15.0,0.14796547472256474,0.0,7.0,0.0,4.0,0.0,0.02611155436280554,0.0,5.0,0.0,0.0,0.0,0.0,20.0,0.0,0.1207659389279756,4.0,11.0,0.021759628635671283,12.0,6.0,0.052223108725611075,0.0,1.0,0.0,7.0,13.0,0.028287517226372665,0.0,0.0,0.0,90.0,132.0,0.3461231594980779,0.0,59.0,0.0,"dymetman, m"
+286,0.0,0.0,0.0,0.0,51.0,0.0,28.0,11.0,0.15581854043392504,0.0,0.0,0.0,0.0,0.0,0.0,0.0,49.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,59.0,0.0,77.0,707.0,0.4714003944773175,8.0,80.0,0.03155818540433925,0.0,0.0,0.0,15.0,142.0,0.08678500986193294,2.0,36.0,0.011834319526627219,7.0,43.0,0.03944773175542406,0.0,2.0,0.0,33.0,129.0,0.18244575936883628,0.0,0.0,0.0,1.0,63.0,0.005917159763313609,2.0,15.0,0.014792899408284023,"schulte im walde, s"
+287,0.0,0.0,0.0,0.0,0.0,0.0,8.0,2.0,0.11278195488721805,0.0,0.0,0.0,0.0,0.0,0.0,51.0,8.0,0.31729323308270674,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,3.0,0.004511278195488722,0.0,0.0,0.0,0.0,0.0,0.0,3.0,23.0,0.013533834586466165,3.0,8.0,0.013533834586466165,29.0,17.0,0.2796992481203008,10.0,12.0,0.07368421052631578,16.0,25.0,0.1804511278195489,0.0,0.0,0.0,0.0,2.0,0.0,1.0,0.0,0.004511278195488722,"klein, e"
+288,0.0,0.0,0.0,1.0,43.0,0.00684931506849315,29.0,66.0,0.1952054794520548,0.0,0.0,0.0,0.0,0.0,0.0,9.0,10.0,0.06164383561643836,0.0,0.0,0.0,0.0,1.0,0.0,0.0,11.0,0.0,4.0,11.0,0.03767123287671233,0.0,6.0,0.0,0.0,0.0,0.0,12.0,12.0,0.060273972602739735,61.0,203.0,0.3746575342465754,2.0,8.0,0.020547945205479454,0.0,0.0,0.0,15.0,127.0,0.1506849315068493,0.0,0.0,0.0,3.0,70.0,0.01541095890410959,12.0,83.0,0.07705479452054795,"hirao, t"
+289,0.0,0.0,0.0,1.0,98.0,0.0016807446991897947,292.0,248.0,0.46444578520944657,0.0,0.0,0.0,0.0,0.0,0.0,3.0,31.0,0.0033614893983795894,0.0,0.0,0.0,2.0,18.0,0.006722978796759179,0.0,7.0,0.0,13.0,5.0,0.02941303223582141,0.0,5.0,0.0,0.0,0.0,0.0,8.0,38.0,0.009524219962075503,39.0,26.0,0.06498879503533873,3.0,21.0,0.0033614893983795894,6.0,14.0,0.010084468195138769,130.0,127.0,0.202723668333046,0.0,0.0,0.0,120.0,123.0,0.1776417858989829,19.0,130.0,0.026051542837441818,"gildea, d"
+290,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,45.0,0.0356795329224781,0.0,0.0,0.0,9.0,0.0,0.050600064871878035,0.0,0.0,0.0,8.0,14.0,0.04865390853065195,1.0,0.0,0.0038923126824521566,0.0,0.0,0.0,0.0,6.0,0.0,30.0,0.0,0.11903989620499512,0.0,0.0,0.0,45.0,32.0,0.2176451508271164,0.0,0.0,0.0,0.0,0.0,0.0,99.0,27.0,0.5102173207914368,3.0,2.0,0.014271813168991242,"yamamoto, s"
+291,0.0,0.0,0.0,3.0,74.0,0.012422360248447204,21.0,6.0,0.08695652173913043,0.0,0.0,0.0,0.0,0.0,0.0,29.0,14.0,0.10351966873706005,0.0,0.0,0.0,10.0,12.0,0.040027605244996545,0.0,98.0,0.0,15.0,21.0,0.05728088336783988,0.0,63.0,0.0,0.0,0.0,0.0,17.0,50.0,0.07039337474120083,13.0,155.0,0.05106970324361628,38.0,24.0,0.15458937198067632,11.0,30.0,0.037267080745341616,69.0,242.0,0.27881297446514836,0.0,0.0,0.0,3.0,4.0,0.012422360248447204,23.0,24.0,0.09523809523809523,"strube, m"
+292,0.0,0.0,0.0,0.0,17.0,0.0,0.0,17.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,13.0,0.02177068214804064,0.0,0.0,0.0,12.0,20.0,0.09252539912917272,0.0,29.0,0.0,0.0,24.0,0.0,8.0,27.0,0.04354136429608128,0.0,0.0,0.0,19.0,58.0,0.09433962264150944,73.0,58.0,0.45700290275761973,27.0,37.0,0.19103773584905664,1.0,4.0,0.00181422351233672,8.0,68.0,0.054426705370101594,0.0,0.0,0.0,0.0,4.0,0.0,8.0,12.0,0.04354136429608128,"teufel, s"
+293,0.0,0.0,0.0,0.0,0.0,0.0,6.0,2.0,0.016666666666666666,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,13.0,13.0,0.1,0.0,0.0,0.0,67.0,0.0,0.20280701754385963,0.0,18.0,0.0,0.0,0.0,0.0,30.0,0.0,0.14736842105263157,65.0,157.0,0.39824561403508774,2.0,7.0,0.004210526315789474,6.0,0.0,0.016666666666666666,22.0,14.0,0.10087719298245613,0.0,0.0,0.0,0.0,1.0,0.0,3.0,0.0,0.013157894736842105,"dang, h"
+294,0.0,0.0,0.0,18.0,66.0,0.1295336787564767,1.0,17.0,0.00690846286701209,0.0,0.0,0.0,0.0,0.0,0.0,30.0,105.0,0.2037996545768567,0.0,0.0,0.0,1.0,10.0,0.00690846286701209,0.0,2.0,0.0,3.0,29.0,0.018998272884283247,1.0,15.0,0.00690846286701209,0.0,0.0,0.0,3.0,11.0,0.018998272884283247,34.0,160.0,0.2193436960276339,2.0,16.0,0.010362694300518137,0.0,23.0,0.0,26.0,14.0,0.2210708117443869,0.0,0.0,0.0,9.0,67.0,0.0846286701208981,11.0,14.0,0.07253886010362695,"krahmer, e"
+295,0.0,0.0,0.0,4.0,120.0,0.02015790357802788,14.0,125.0,0.03913992944733747,0.0,0.0,0.0,0.0,0.0,0.0,13.0,13.0,0.03827201970995016,0.0,0.0,0.0,2.0,1.0,0.0023797525057394028,0.0,26.0,0.0,5.0,35.0,0.02385351923399966,0.0,4.0,0.0,0.0,0.0,0.0,8.0,20.0,0.023517554174365858,0.0,1.0,0.0,3.0,15.0,0.00671930119267596,16.0,20.0,0.030068872837224925,9.0,28.0,0.08399126490844952,0.0,0.0,0.0,252.0,345.0,0.720309087854863,5.0,96.0,0.011590794557366032,"watanabe, t"
+296,0.0,0.0,0.0,2.0,211.0,0.009278350515463918,3.0,271.0,0.018556701030927835,0.0,0.0,0.0,0.0,0.0,0.0,4.0,27.0,0.012371134020618558,0.0,0.0,0.0,2.0,44.0,0.008247422680412371,0.0,41.0,0.0,43.0,62.0,0.20618556701030927,0.0,16.0,0.0,0.0,0.0,0.0,9.0,67.0,0.03711340206185567,0.0,181.0,0.0,0.0,29.0,0.0,16.0,115.0,0.07010309278350516,42.0,168.0,0.18762886597938144,0.0,0.0,0.0,118.0,493.0,0.434020618556701,4.0,152.0,0.016494845360824743,"nagata, m"
+297,0.0,0.0,0.0,12.0,3.0,0.019540437850551835,15.0,0.0,0.03208491647065919,0.0,0.0,0.0,0.0,0.0,0.0,21.0,34.0,0.05234907424160183,0.0,0.0,0.0,13.0,3.0,0.026005669139376398,0.0,0.0,0.0,169.0,20.0,0.4585610035582896,5.0,15.0,0.015198118328206984,0.0,0.0,0.0,61.0,35.0,0.10305771666365117,6.0,21.0,0.015921838248597792,17.0,0.0,0.03377359628490441,27.0,19.0,0.09929437307761897,30.0,0.0,0.06484530486701647,0.0,0.0,0.0,19.0,19.0,0.04475001507749834,14.0,27.0,0.03461793619202702,"murata, m"
+298,0.0,0.0,0.0,0.0,41.0,0.0,77.0,44.0,0.14857280791744612,0.0,0.0,0.0,0.0,0.0,0.0,9.0,29.0,0.027759338603584573,0.0,0.0,0.0,9.0,70.0,0.03379397743045079,2.0,16.0,0.002896626636895782,40.0,35.0,0.1626335163840444,0.0,53.0,0.0,0.0,0.0,0.0,71.0,56.0,0.15448675396777503,5.0,20.0,0.027759338603584576,5.0,25.0,0.028966266368957817,45.0,36.0,0.08086416028000723,39.0,128.0,0.10958904109589042,0.0,0.0,0.0,47.0,42.0,0.15629714561583488,21.0,9.0,0.06638102709552833,"torisawa, k"
+299,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.014742014742014743,0.0,0.0,0.0,0.0,0.0,0.0,42.0,5.0,0.15847665847665848,0.0,0.0,0.0,0.0,10.0,0.0,0.0,0.0,0.0,4.0,17.0,0.01597051597051597,2.0,14.0,0.009828009828009828,0.0,0.0,0.0,19.0,9.0,0.09336609336609338,6.0,36.0,0.029484029484029485,4.0,1.0,0.0171990171990172,105.0,1.0,0.4017199017199017,7.0,15.0,0.0343980343980344,0.0,0.0,0.0,39.0,0.0,0.13636363636363638,18.0,33.0,0.08845208845208846,"kikui, g"
+300,0.0,0.0,0.0,3.0,112.0,0.020098258150960252,7.0,11.0,0.023447967842786962,0.0,0.0,0.0,0.0,0.0,0.0,5.0,6.0,0.018051213339288375,0.0,0.0,0.0,8.0,0.0,0.029961292243561116,0.0,0.0,0.0,1.0,14.0,0.0033497096918267085,1.0,0.0,0.0016748548459133543,0.0,0.0,0.0,0.0,7.0,0.0,23.0,15.0,0.10495757034390353,0.0,22.0,0.0,9.0,59.0,0.026909334524341227,9.0,4.0,0.03907994640464493,0.0,0.0,0.0,184.0,540.0,0.7324698526127736,0.0,21.0,0.0,"finch, a"
+301,0.0,0.0,0.0,1.0,510.0,0.0020028612303290413,47.0,560.0,0.11867747575902082,0.0,0.0,0.0,0.0,0.0,0.0,22.0,634.0,0.05786043554283897,0.0,0.0,0.0,3.0,421.0,0.008011444921316165,0.0,715.0,0.0,7.0,16.0,0.02670481640438722,1.0,236.0,0.0013352408202193609,0.0,0.0,0.0,52.0,144.0,0.13886504530281354,0.0,235.0,0.0,4.0,174.0,0.01602288984263233,36.0,344.0,0.13307900174852966,30.0,523.0,0.050357653791130184,0.0,0.0,0.0,78.0,410.0,0.2508027340645367,67.0,271.0,0.19628040057224605,"li, z"
+302,0.0,0.0,0.0,1.0,500.0,0.01016260162601626,35.0,261.0,0.1619241192411924,0.0,0.0,0.0,0.0,0.0,0.0,1.0,199.0,0.01016260162601626,0.0,0.0,0.0,2.0,128.0,0.008468834688346883,3.0,238.0,0.009146341463414634,4.0,23.0,0.008130081300813007,1.0,112.0,0.01016260162601626,0.0,0.0,0.0,0.0,72.0,0.0,0.0,62.0,0.0,0.0,46.0,0.0,90.0,315.0,0.4664634146341463,57.0,421.0,0.1781842818428184,0.0,0.0,0.0,25.0,332.0,0.058434959349593495,20.0,68.0,0.07876016260162601,"zhao, h"
+303,0.0,0.0,0.0,0.0,12.0,0.0,14.0,0.0,0.05028371890004364,0.0,0.0,0.0,0.0,0.0,0.0,3.0,69.0,0.013094718463553033,0.0,0.0,0.0,18.0,10.0,0.06110868616324748,0.0,19.0,0.0,140.0,56.0,0.4414666084679179,0.0,9.0,0.0,0.0,0.0,0.0,29.0,21.0,0.1232649498035792,9.0,20.0,0.04714098646879092,8.0,1.0,0.034046268005237884,5.0,9.0,0.01745962461807071,24.0,19.0,0.09506765604539501,0.0,0.0,0.0,53.0,19.0,0.10894805761676123,5.0,4.0,0.00811872544740288,"sato, s"
+304,0.0,0.0,0.0,0.0,0.0,0.0,125.0,42.0,0.33100411022076204,0.0,0.0,0.0,0.0,0.0,0.0,20.0,5.0,0.08513111589528763,0.0,0.0,0.0,26.0,0.0,0.05530934961281985,26.0,0.0,0.06708819258592037,6.0,6.0,0.010242472150522194,0.0,0.0,0.0,0.0,0.0,0.0,2.0,6.0,0.0020484944301044387,6.0,2.0,0.004811266523337399,32.0,5.0,0.13192304129872587,102.0,29.0,0.21816000113696343,4.0,0.0,0.003840927056445823,0.0,0.0,0.0,15.0,7.0,0.024377083718242824,43.0,13.0,0.06606394537086815,"harper, m"
+305,0.0,0.0,0.0,3.0,30.0,0.006584660052484747,147.0,40.0,0.283030637922636,0.0,0.0,0.0,0.0,0.0,0.0,210.0,110.0,0.3496570008221198,0.0,0.0,0.0,1.0,5.0,0.002194886684161582,0.0,0.0,0.0,11.0,9.0,0.01653481302068392,1.0,2.0,0.001097443342080791,0.0,0.0,0.0,16.0,19.0,0.03365492915714426,39.0,10.0,0.05190714474122478,5.0,7.0,0.013169320104969493,57.0,41.0,0.10754944752391753,4.0,19.0,0.006584660052484747,0.0,0.0,0.0,57.0,141.0,0.09291686962950697,22.0,16.0,0.03511818694658531,"bangalore, s"
+306,0.0,0.0,0.0,3.0,84.0,0.0029900568743622406,369.0,491.0,0.5767487482103165,0.0,0.0,0.0,0.0,0.0,0.0,15.0,127.0,0.015294378218464,0.0,0.0,0.0,0.0,77.0,0.0,1.0,32.0,0.0016611427079790222,29.0,55.0,0.05299045238453082,9.0,55.0,0.0224254265577168,0.0,0.0,0.0,40.0,60.0,0.055232995040302484,6.0,78.0,0.007641256456703502,56.0,148.0,0.050668807695045844,33.0,88.0,0.04850536707298744,62.0,350.0,0.08203671916404971,0.0,0.0,0.0,2.0,55.0,0.001993371249574827,47.0,46.0,0.08181127836796684,"miyao, y"
+307,0.0,0.0,0.0,0.0,68.0,0.0,222.0,298.0,0.6827683119620587,0.0,0.0,0.0,0.0,0.0,0.0,4.0,10.0,0.015808888108203058,0.0,0.0,0.0,7.0,2.0,0.016687159669769892,0.0,12.0,0.0,9.0,175.0,0.023713332162304587,6.0,20.0,0.015808888108203058,0.0,0.0,0.0,6.0,19.0,0.031617776216406115,0.0,11.0,0.0,8.0,64.0,0.033374319339539785,0.0,30.0,0.0,57.0,77.0,0.17934305287194802,0.0,0.0,0.0,1.0,14.0,0.0008782715615668365,0.0,27.0,0.0,"kallmeyer, l"
+308,0.0,0.0,0.0,0.0,30.0,0.0,23.0,5.0,0.07098649787900281,0.0,0.0,0.0,0.0,0.0,0.0,36.0,93.0,0.19783484323355055,0.0,0.0,0.0,4.0,15.0,0.015455847127621138,0.0,6.0,0.0,49.0,30.0,0.16260420987836044,1.0,4.0,0.0038639617819052844,0.0,0.0,0.0,22.0,39.0,0.07540579962263647,6.0,46.0,0.028335719733972083,40.0,68.0,0.07306400460329993,11.0,12.0,0.04195158506068594,25.0,113.0,0.09556865473912403,0.0,0.0,0.0,35.0,91.0,0.17825743687189713,11.0,7.0,0.05667143946794417,"tokunaga, t"
+309,0.0,0.0,0.0,22.0,159.0,0.0136707713824188,76.0,51.0,0.05833461067177722,0.0,0.0,0.0,0.0,0.0,0.0,34.0,196.0,0.02538608121332942,0.0,0.0,0.0,14.0,152.0,0.007402818730709543,5.0,176.0,0.0027935165021545443,390.0,538.0,0.24164983452042665,69.0,412.0,0.05063248660155112,0.0,0.0,0.0,89.0,165.0,0.057711511374056376,23.0,150.0,0.012885094866187835,126.0,152.0,0.06612879380855158,422.0,642.0,0.2802969562460942,90.0,239.0,0.05763373733507594,0.0,0.0,0.0,112.0,129.0,0.08703799273141516,63.0,79.0,0.038435794016251364,"huang, c"
+310,0.0,0.0,0.0,0.0,11.0,0.0,7.0,4.0,0.030153684640284627,0.0,0.0,0.0,0.0,0.0,0.0,23.0,32.0,0.2582310227173311,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,45.0,25.0,0.49259766897047813,0.0,100.0,0.0,0.0,0.0,0.0,14.0,9.0,0.10206759791198856,0.0,0.0,0.0,30.0,3.0,0.061240558747193155,5.0,1.0,0.016039193957598202,3.0,0.0,0.013472922924382494,0.0,0.0,0.0,1.0,0.0,0.0074849571802124955,2.0,1.0,0.01871239295053124,"shirai, k"
+311,0.0,0.0,0.0,0.0,13.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,75.0,110.0,0.39031892504452,0.0,0.0,0.0,53.0,2.0,0.4270681560628136,0.0,0.0,0.0,0.0,23.0,0.0,0.0,47.0,0.0,0.0,0.0,0.0,9.0,11.0,0.0949759861853111,4.0,19.0,0.03561599481949166,1.0,29.0,0.0023743996546327774,2.0,6.0,0.01403054341373914,0.0,7.0,0.0,0.0,0.0,0.0,4.0,0.0,0.023743996546327775,1.0,3.0,0.011871998273163887,"strzalkowski, t"
+312,0.0,0.0,0.0,1.0,62.0,0.005983545250560956,3.0,3.0,0.013462976813762153,0.0,0.0,0.0,0.0,0.0,0.0,19.0,44.0,0.05011219147344801,0.0,0.0,0.0,23.0,88.0,0.1914734480179506,0.0,74.0,0.0,0.0,1.0,0.0,0.0,14.0,0.0,0.0,0.0,0.0,51.0,20.0,0.3222139117427075,34.0,4.0,0.21211667913238594,2.0,15.0,0.010471204188481674,6.0,39.0,0.035901271503365736,11.0,25.0,0.05953627524308152,0.0,0.0,0.0,2.0,39.0,0.011967090501121913,14.0,25.0,0.08676140613313388,"wu, m"
+313,0.0,0.0,0.0,0.0,5.0,0.0,5.0,5.0,0.013722440764797365,0.0,0.0,0.0,0.0,0.0,0.0,110.0,45.0,0.47296679169334915,0.0,0.0,0.0,2.0,0.0,0.004391181044735157,0.0,0.0,0.0,2.0,5.0,0.007318635074558594,1.0,31.0,0.0021955905223675783,0.0,0.0,0.0,0.0,22.0,0.0,4.0,0.0,0.01088646967340591,32.0,55.0,0.07483304363736162,7.0,8.0,0.011892781996157715,158.0,188.0,0.37800750160095137,0.0,0.0,0.0,16.0,89.0,0.02378556399231543,0.0,23.0,0.0,"prasad, r"
+314,0.0,0.0,0.0,0.0,20.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,54.0,129.0,0.5340572556762093,0.0,0.0,0.0,1.0,25.0,0.002961500493583416,0.0,27.0,0.0,2.0,6.0,0.015794669299111552,0.0,25.0,0.0,0.0,0.0,0.0,9.0,17.0,0.07107601184600199,39.0,93.0,0.2981243830207305,6.0,67.0,0.04738400789733466,0.0,18.0,0.0,1.0,43.0,0.007897334649555776,0.0,0.0,0.0,1.0,3.0,0.002961500493583416,3.0,12.0,0.019743336623889437,"passonneau, r"
+315,0.0,0.0,0.0,3.0,2.0,0.010980392156862747,19.0,0.0,0.07930283224400872,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,0.0,26.0,6.0,0.14579520697167755,0.0,0.0,0.0,0.0,0.0,0.0,4.0,4.0,0.030501089324618737,1.0,0.0,0.009150326797385621,0.0,0.0,0.0,111.0,19.0,0.5778649237472767,0.0,0.0,0.0,0.0,0.0,0.0,9.0,64.0,0.05490196078431373,29.0,0.0,0.09150326797385622,"goh, c"
+316,0.0,0.0,0.0,4.0,0.0,0.008121002943863568,58.0,10.0,0.12059689371637397,0.0,0.0,0.0,0.0,0.0,0.0,37.0,0.0,0.062024159983758015,0.0,0.0,0.0,15.0,0.0,0.038574763983351947,11.0,0.0,0.03349913714343722,9.0,16.0,0.024566033905187294,15.0,0.0,0.03045376103948838,0.0,0.0,0.0,14.0,0.0,0.027915947619531013,55.0,39.0,0.10861841437417522,16.0,3.0,0.038574763983351947,25.0,1.0,0.049639630494366066,18.0,1.0,0.05177139376713025,0.0,0.0,0.0,119.0,55.0,0.19723885899908636,94.0,12.0,0.20840523804689878,"isozaki, h"
+317,0.0,0.0,0.0,0.0,87.0,0.0,17.0,1.0,0.06468276249298147,0.0,0.0,0.0,0.0,0.0,0.0,0.0,17.0,0.0,0.0,0.0,0.0,16.0,59.0,0.07186973610331274,0.0,7.0,0.0,129.0,36.0,0.4281864121280179,4.0,30.0,0.010780460415496912,0.0,0.0,0.0,26.0,66.0,0.08399775407074676,5.0,13.0,0.0145985401459854,2.0,3.0,0.0033688938798427845,10.0,48.0,0.03593486805165637,3.0,4.0,0.007748455923638404,0.0,0.0,0.0,78.0,256.0,0.23155530600786073,14.0,1.0,0.0472768107804604,"utsuro, t"
+318,0.0,0.0,0.0,3.0,135.0,0.011709601873536299,38.0,34.0,0.12939110070257612,0.0,0.0,0.0,0.0,0.0,0.0,16.0,72.0,0.05796252927400467,0.0,0.0,0.0,6.0,25.0,0.01756440281030445,11.0,131.0,0.06440281030444965,1.0,6.0,0.00234192037470726,15.0,13.0,0.058548009367681494,0.0,0.0,0.0,5.0,48.0,0.015612802498048398,12.0,111.0,0.040983606557377046,13.0,42.0,0.058548009367681494,8.0,106.0,0.030054644808743165,2.0,34.0,0.007806401249024199,0.0,0.0,0.0,69.0,140.0,0.18110850897736144,86.0,32.0,0.32396565183450426,"suzuki, j"
+319,0.0,0.0,0.0,6.0,312.0,0.05317024463305043,31.0,390.0,0.12610251289732072,0.0,0.0,0.0,0.0,0.0,0.0,9.0,54.0,0.018405724746214016,0.0,0.0,0.0,0.0,55.0,0.0,1.0,134.0,0.002096854717923116,2.0,100.0,0.02096854717923116,0.0,52.0,0.0,0.0,0.0,0.0,2.0,175.0,0.01048427358961558,2.0,22.0,0.012581128307538693,7.0,74.0,0.025087368946580133,17.0,95.0,0.12802462972208356,5.0,150.0,0.01048427358961558,0.0,0.0,0.0,128.0,393.0,0.5151855550008322,31.0,274.0,0.07740888666999501,"dyer, c"
+320,0.0,0.0,0.0,0.0,0.0,0.0,5.0,3.0,0.030784265819692156,0.0,0.0,0.0,0.0,0.0,0.0,0.0,25.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,0.0,98.0,13.0,0.8353286098216467,3.0,16.0,0.014659174199853409,0.0,0.0,0.0,2.0,18.0,0.009772782799902272,2.0,2.0,0.029318348399706817,4.0,9.0,0.017102369899828974,0.0,4.0,0.0,8.0,67.0,0.040312729049596874,0.0,0.0,0.0,0.0,8.0,0.0,5.0,0.0,0.022721720009772784,"matsuyoshi, s"
+321,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.005934718100890208,0.0,0.0,0.0,0.0,0.0,0.0,71.0,96.0,0.19980217606330364,0.0,0.0,0.0,1.0,0.0,0.002967359050445104,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.004945598417408506,53.0,274.0,0.29376854599406527,6.0,3.0,0.011869436201780416,85.0,136.0,0.47477744807121663,0.0,0.0,0.0,2.0,0.0,0.005934718100890208,0.0,4.0,0.0,"bunt, h"
+322,0.0,0.0,0.0,5.0,11.0,0.009664111073173828,1.0,12.0,0.0017814029627970186,0.0,0.0,0.0,0.0,0.0,0.0,6.0,22.0,0.01350897246787739,0.0,0.0,0.0,4.0,18.0,0.031174551848947827,0.0,4.0,0.0,70.0,204.0,0.2447026878941523,0.0,148.0,0.0,0.0,0.0,0.0,28.0,56.0,0.05772015509002158,3.0,17.0,0.008105383480726435,46.0,42.0,0.1535960722763766,88.0,143.0,0.40503165364128224,16.0,75.0,0.07222104511672912,0.0,0.0,0.0,1.0,6.0,0.0024939641479158263,0.0,5.0,0.0,"hsieh, s"
+323,0.0,0.0,0.0,8.0,5.0,0.09699769053117782,22.0,0.0,0.19399538106235564,0.0,0.0,0.0,0.0,0.0,0.0,4.0,2.0,0.023556581986143185,0.0,0.0,0.0,65.0,7.0,0.43926096997690534,0.0,0.0,0.0,0.0,1.0,0.0,0.0,16.0,0.0,0.0,0.0,0.0,10.0,31.0,0.07159353348729791,0.0,31.0,0.0,2.0,3.0,0.011547344110854502,8.0,0.0,0.036951501154734404,16.0,4.0,0.12609699769053118,0.0,0.0,0.0,0.0,1.0,0.0,0.0,28.0,0.0,"moll{\'a}, d"
+324,0.0,0.0,0.0,0.0,16.0,0.0,47.0,56.0,0.14266607222469907,0.0,0.0,0.0,0.0,0.0,0.0,5.0,22.0,0.022291573785109226,0.0,0.0,0.0,26.0,77.0,0.14177440927329468,4.0,4.0,0.011888839352058254,54.0,89.0,0.2189032545697726,0.0,17.0,0.0,0.0,0.0,0.0,52.0,22.0,0.17758953782137016,0.0,7.0,0.0,3.0,59.0,0.005944419676029127,37.0,2.0,0.1285480754941299,7.0,32.0,0.020805468866101944,0.0,0.0,0.0,27.0,0.0,0.11324119482835487,6.0,5.0,0.0163471541090801,"choi, k"
+325,0.0,0.0,0.0,0.0,29.0,0.0,108.0,158.0,0.7894570707070708,0.0,0.0,0.0,0.0,0.0,0.0,3.0,42.0,0.011363636363636366,0.0,0.0,0.0,0.0,42.0,0.0,0.0,10.0,0.0,10.0,101.0,0.07133838383838384,2.0,0.0,0.014204545454545456,0.0,0.0,0.0,0.0,8.0,0.0,0.0,1.0,0.0,2.0,163.0,0.012626262626262626,5.0,12.0,0.03787878787878788,0.0,69.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,10.0,35.0,0.06313131313131314,"nasr, a"
+326,0.0,0.0,0.0,7.0,88.0,0.01568892391997898,228.0,315.0,0.4544833539766543,0.0,0.0,0.0,0.0,0.0,0.0,101.0,76.0,0.14920992380737905,0.0,0.0,0.0,2.0,11.0,0.004128664189468153,0.0,2.0,0.0,44.0,81.0,0.0985925008444995,3.0,156.0,0.0061929962842022295,0.0,0.0,0.0,3.0,46.0,0.004954397027361783,37.0,17.0,0.05412303419284615,59.0,323.0,0.07556956799159253,23.0,60.0,0.032121007394062234,35.0,111.0,0.07720602034305446,0.0,0.0,0.0,20.0,25.0,0.021949480163645233,3.0,4.0,0.0057801298652554135,"rambow, o"
+327,0.0,0.0,0.0,0.0,17.0,0.0,126.0,218.0,0.8258599508599508,0.0,0.0,0.0,0.0,0.0,0.0,0.0,86.0,0.0,0.0,0.0,0.0,2.0,1.0,0.02457002457002457,0.0,7.0,0.0,3.0,2.0,0.018427518427518427,0.0,17.0,0.0,0.0,0.0,0.0,0.0,9.0,0.0,0.0,13.0,0.0,7.0,55.0,0.1167076167076167,0.0,4.0,0.0,2.0,2.0,0.006756756756756757,0.0,0.0,0.0,2.0,5.0,0.0076781326781326775,0.0,11.0,0.0,"maier, w"
+328,0.0,0.0,0.0,2.0,12.0,0.004507042253521127,280.0,422.0,0.8185915492957747,0.0,0.0,0.0,0.0,0.0,0.0,11.0,11.0,0.036056338028169016,0.0,0.0,0.0,3.0,0.0,0.009014084507042254,3.0,0.0,0.010140845070422537,0.0,8.0,0.0,1.0,1.0,0.0033802816901408453,0.0,0.0,0.0,1.0,0.0,0.0033802816901408453,0.0,0.0,0.0,5.0,3.0,0.015774647887323943,7.0,4.0,0.01915492957746479,1.0,34.0,0.0033802816901408453,0.0,0.0,0.0,4.0,1.0,0.009014084507042254,22.0,4.0,0.06760563380281691,"satta, g"
+329,0.0,0.0,0.0,0.0,106.0,0.0,12.0,345.0,0.08988764044943821,0.0,0.0,0.0,0.0,0.0,0.0,0.0,40.0,0.0,0.0,0.0,0.0,1.0,23.0,0.009630818619582666,0.0,42.0,0.0,1.0,41.0,0.009630818619582666,0.0,20.0,0.0,0.0,0.0,0.0,0.0,34.0,0.0,1.0,16.0,0.006420545746388444,0.0,46.0,0.0,7.0,135.0,0.04815409309791333,114.0,172.0,0.6821829855537721,0.0,0.0,0.0,0.0,21.0,0.0,22.0,11.0,0.15409309791332265,"sun, w"
+330,0.0,0.0,0.0,0.0,38.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,47.0,0.0,0.0,0.0,0.0,2.0,10.0,0.01728608470181504,0.0,18.0,0.0,44.0,0.0,0.3759723422644771,0.0,4.0,0.0,0.0,0.0,0.0,22.0,30.0,0.23163353500432154,1.0,6.0,0.00864304235090752,1.0,5.0,0.012964563526361281,12.0,8.0,0.13223854796888504,0.0,6.0,0.0,0.0,0.0,0.0,22.0,0.0,0.2212618841832325,0.0,8.0,0.0,"lin, t"
+331,0.0,0.0,0.0,0.0,215.0,0.0,15.0,68.0,0.017158591047983614,0.0,0.0,0.0,0.0,0.0,0.0,36.0,166.0,0.04172123165776837,0.0,0.0,0.0,101.0,180.0,0.10724119404989758,2.0,152.0,0.002350491924381317,13.0,53.0,0.023916255330579898,44.0,233.0,0.0373140592995534,0.0,0.0,0.0,91.0,108.0,0.10183506262382055,205.0,105.0,0.36379738759611835,17.0,65.0,0.02009670595346026,70.0,297.0,0.0833081494912864,37.0,99.0,0.05788086363788992,0.0,0.0,0.0,33.0,28.0,0.04608643094590511,87.0,157.0,0.09729357644135522,"lin, c"
+332,0.0,0.0,0.0,1.0,51.0,0.010928961748633882,3.0,0.0,0.03278688524590164,0.0,0.0,0.0,0.0,0.0,0.0,111.0,220.0,0.6848816029143899,0.0,0.0,0.0,0.0,30.0,0.0,0.0,17.0,0.0,3.0,41.0,0.01639344262295082,1.0,63.0,0.007285974499089254,0.0,0.0,0.0,1.0,11.0,0.004371584699453553,1.0,20.0,0.004371584699453553,3.0,17.0,0.018943533697632062,6.0,27.0,0.04371584699453552,6.0,69.0,0.07285974499089255,0.0,0.0,0.0,0.0,0.0,0.0,9.0,9.0,0.10346083788706742,"fern{\'a}ndez, r"
+333,0.0,0.0,0.0,0.0,3.0,0.0,29.0,8.0,0.14376590330788802,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,14.0,0.0,0.06615776081424936,2.0,1.0,0.010178117048346055,9.0,1.0,0.05216284987277353,0.0,0.0,0.0,0.0,0.0,0.0,9.0,0.0,0.06361323155216285,0.0,0.0,0.0,0.0,1.0,0.0,1.0,3.0,0.0050890585241730275,49.0,5.0,0.22137404580152673,0.0,0.0,0.0,71.0,84.0,0.3689567430025445,16.0,0.0,0.06870229007633588,"hwang, y"
+334,0.0,0.0,0.0,0.0,32.0,0.0,14.0,3.0,0.0850574712643678,0.0,0.0,0.0,0.0,0.0,0.0,40.0,30.0,0.17490421455938693,0.0,0.0,0.0,5.0,4.0,0.01704980842911877,0.0,33.0,0.0,14.0,17.0,0.06436781609195401,0.0,2.0,0.0,0.0,0.0,0.0,6.0,21.0,0.02758620689655172,10.0,4.0,0.04597701149425286,0.0,4.0,0.0,7.0,13.0,0.045977011494252866,13.0,57.0,0.05977011494252872,0.0,0.0,0.0,102.0,139.0,0.36206896551724127,19.0,12.0,0.1172413793103448,"imamura, k"
+335,0.0,0.0,0.0,0.0,149.0,0.0,0.0,31.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,20.0,215.0,0.0721747388414055,0.0,0.0,0.0,31.0,115.0,0.11039886039886039,3.0,114.0,0.021367521367521368,11.0,41.0,0.040123456790123455,0.0,211.0,0.0,0.0,0.0,0.0,50.0,135.0,0.22198480531813863,7.0,62.0,0.04392212725546058,5.0,72.0,0.026115859449192782,39.0,362.0,0.18779677113010443,3.0,76.0,0.010683760683760684,0.0,0.0,0.0,49.0,98.0,0.24050332383665715,6.0,77.0,0.02492877492877493,"wu, j"
+336,0.0,0.0,0.0,5.0,47.0,0.022099447513812154,29.0,1.0,0.2005524861878453,0.0,0.0,0.0,0.0,0.0,0.0,4.0,7.0,0.011049723756906077,0.0,0.0,0.0,0.0,4.0,0.0,0.0,25.0,0.0,81.0,375.0,0.3806629834254144,0.0,20.0,0.0,0.0,0.0,0.0,5.0,54.0,0.011602209944751382,1.0,35.0,0.0022099447513812156,8.0,110.0,0.03867403314917127,4.0,25.0,0.011049723756906077,25.0,44.0,0.2541436464088398,0.0,0.0,0.0,9.0,29.0,0.023756906077348067,4.0,30.0,0.04419889502762431,"villavicencio, a"
+337,0.0,0.0,0.0,5.0,17.0,0.01507537688442211,22.0,47.0,0.13333333333333333,0.0,0.0,0.0,0.0,0.0,0.0,7.0,8.0,0.028475711892797316,0.0,0.0,0.0,1.0,13.0,0.005025125628140704,1.0,1.0,0.005025125628140704,31.0,39.0,0.12395309882747067,0.0,3.0,0.0,0.0,0.0,0.0,22.0,10.0,0.11557788944723618,14.0,2.0,0.04690117252931323,21.0,27.0,0.10452261306532663,2.0,0.0,0.020100502512562814,54.0,120.0,0.3869346733668342,0.0,0.0,0.0,1.0,15.0,0.003350083752093802,3.0,15.0,0.011725293132328308,"copestake, a"
+338,0.0,0.0,0.0,0.0,2.0,0.0,1.0,2.0,0.008620689655172414,0.0,0.0,0.0,0.0,0.0,0.0,92.0,271.0,0.810344827586207,0.0,0.0,0.0,0.0,6.0,0.0,0.0,10.0,0.0,0.0,2.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,10.0,0.0,6.0,50.0,0.05172413793103449,9.0,6.0,0.07758620689655173,6.0,24.0,0.05172413793103449,0.0,2.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,0.0,51.0,0.0,"hastie, h"
+339,0.0,0.0,0.0,0.0,78.0,0.0,1.0,2.0,0.0008329036608098994,0.0,0.0,0.0,0.0,0.0,0.0,371.0,418.0,0.8216713600126917,0.0,0.0,0.0,1.0,35.0,0.0013326458572958393,0.0,21.0,0.0,5.0,14.0,0.011105382144131992,0.0,220.0,0.0,0.0,0.0,0.0,0.0,28.0,0.0,35.0,134.0,0.08923967794391781,2.0,39.0,0.004442152857652796,17.0,24.0,0.03728235434101455,8.0,123.0,0.01776861143061119,0.0,0.0,0.0,2.0,9.0,0.006663229286479195,6.0,9.0,0.009661682465394833,"walker, m"
+340,0.0,0.0,0.0,0.0,0.0,0.0,14.0,5.0,0.04115733991252663,0.0,0.0,0.0,0.0,0.0,0.0,136.0,39.0,0.46151545736607985,0.0,0.0,0.0,16.0,0.0,0.06044633845463721,3.0,0.0,0.006728720421666479,2.0,0.0,0.006280139060222048,2.0,0.0,0.006280139060222048,0.0,0.0,0.0,13.0,0.0,0.05102612986430414,14.0,1.0,0.04396097342155434,0.0,4.0,0.0,40.0,88.0,0.11509850099061716,14.0,0.0,0.04474599080408209,0.0,0.0,0.0,45.0,10.0,0.1588351837314493,1.0,6.0,0.00392508691263878,"okuno, h"
+341,0.0,0.0,0.0,3.0,52.0,0.005405560004576136,3.0,51.0,0.0019305571444914768,0.0,0.0,0.0,0.0,0.0,0.0,8.0,31.0,0.013585402127902985,0.0,0.0,0.0,0.0,40.0,0.0,0.0,49.0,0.0,37.0,279.0,0.11571902528314838,0.0,88.0,0.0,0.0,0.0,0.0,31.0,58.0,0.13018390344354192,21.0,21.0,0.06006177782862372,33.0,111.0,0.0857095870037753,6.0,20.0,0.018018533348587117,142.0,169.0,0.47403758151241276,0.0,0.0,0.0,24.0,21.0,0.09309575563436677,1.0,40.0,0.0022523166685733897,"pad{\'o}, s"
+342,0.0,0.0,0.0,0.0,47.0,0.0,75.0,15.0,0.25912154310945856,0.0,0.0,0.0,0.0,0.0,0.0,4.0,164.0,0.020915640250987688,0.0,0.0,0.0,5.0,23.0,0.013943760167325124,0.0,42.0,0.0,9.0,8.0,0.03997211247966536,0.0,10.0,0.0,0.0,0.0,0.0,49.0,134.0,0.18824076225888917,11.0,26.0,0.04531722054380666,11.0,17.0,0.025563560306762725,1.0,51.0,0.006971880083662562,5.0,39.0,0.025563560306762725,0.0,0.0,0.0,40.0,328.0,0.2279804787357658,22.0,159.0,0.14640948175691382,"riezler, s"
+343,0.0,0.0,0.0,0.0,2.0,0.0,0.0,107.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,66.0,31.0,0.4097826086956522,0.0,0.0,0.0,0.0,2.0,0.0,0.0,6.0,0.0,9.0,8.0,0.17391304347826086,1.0,3.0,0.021739130434782608,0.0,0.0,0.0,4.0,0.0,0.017391304347826087,10.0,1.0,0.09565217391304348,21.0,47.0,0.11413043478260869,0.0,0.0,0.0,14.0,25.0,0.1673913043478261,0.0,0.0,0.0,0.0,12.0,0.0,0.0,0.0,0.0,"green, n"
+344,0.0,0.0,0.0,2.0,103.0,0.002686651008104731,287.0,410.0,0.41474564335114933,0.0,0.0,0.0,0.0,0.0,0.0,10.0,107.0,0.008125083957844004,0.0,0.0,0.0,0.0,52.0,0.0,19.0,241.0,0.024627634240960036,16.0,47.0,0.019791662426371516,2.0,17.0,0.0022388758400872753,0.0,0.0,0.0,22.0,107.0,0.018887970723645377,5.0,34.0,0.006268852352244372,16.0,45.0,0.018582669472724387,110.0,325.0,0.12304861617119665,11.0,171.0,0.016119906048628386,0.0,0.0,0.0,16.0,96.0,0.030090491290772983,243.0,92.0,0.3147859431162709,"johnson, m"
+345,0.0,0.0,0.0,3.0,25.0,0.013114754098360656,8.0,27.0,0.03278688524590164,0.0,0.0,0.0,0.0,0.0,0.0,49.0,40.0,0.27672131147540985,0.0,0.0,0.0,6.0,3.0,0.024043715846994534,0.0,16.0,0.0,3.0,9.0,0.013114754098360656,0.0,62.0,0.0,0.0,0.0,0.0,3.0,7.0,0.009836065573770493,4.0,236.0,0.015737704918032787,12.0,29.0,0.04896174863387978,50.0,339.0,0.26885245901639343,21.0,33.0,0.16349726775956283,0.0,0.0,0.0,1.0,19.0,0.004371584699453552,25.0,0.0,0.12896174863387977,"tetreault, j"
+346,0.0,0.0,0.0,4.0,62.0,0.016146600884218622,0.0,10.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,23.0,14.0,0.05484718395591722,0.0,0.0,0.0,0.0,9.0,0.0,2.0,11.0,0.008073300442109311,4.0,25.0,0.01076440058947908,0.0,20.0,0.0,0.0,0.0,0.0,8.0,29.0,0.021977317870186457,168.0,375.0,0.6898186711091179,3.0,49.0,0.012109950663163966,12.0,50.0,0.015377715127827258,55.0,138.0,0.1628115589158711,0.0,0.0,0.0,1.0,10.0,0.004036650221054656,1.0,4.0,0.004036650221054656,"nenkova, a"
+347,0.0,0.0,0.0,0.0,112.0,0.0,28.0,214.0,0.051474585485186186,0.0,0.0,0.0,0.0,0.0,0.0,4.0,51.0,0.0011649128256902108,0.0,0.0,0.0,5.0,0.0,0.010532753465615654,0.0,12.0,0.0,21.0,98.0,0.04382984506659418,0.0,12.0,0.0,0.0,0.0,0.0,9.0,4.0,0.012231584669747213,5.0,7.0,0.006334213489690522,34.0,138.0,0.06653593756067254,87.0,123.0,0.24017104803323885,193.0,322.0,0.5470673319613248,0.0,0.0,0.0,7.0,80.0,0.008426202772492524,6.0,11.0,0.012231584669747213,"xue, n"
+348,0.0,0.0,0.0,0.0,69.0,0.0,65.0,139.0,0.16324034062023385,0.0,0.0,0.0,0.0,0.0,0.0,17.0,131.0,0.026499745805795626,0.0,0.0,0.0,0.0,135.0,0.0,8.0,45.0,0.011565836298932384,96.0,103.0,0.28236209964412806,1.0,57.0,0.0022241992882562275,0.0,0.0,0.0,60.0,30.0,0.16080960854092527,5.0,40.0,0.010231316725978646,31.0,54.0,0.05214158617183528,5.0,108.0,0.011120996441281138,96.0,182.0,0.23643238434163696,0.0,0.0,0.0,26.0,31.0,0.04337188612099644,0.0,21.0,0.0,"kawahara, d"
+349,0.0,0.0,0.0,11.0,61.0,0.02887435834759228,16.0,64.0,0.039415790760205335,0.0,0.0,0.0,0.0,0.0,0.0,55.0,95.0,0.1325776093864581,0.0,0.0,0.0,16.0,45.0,0.045373991689073585,0.0,150.0,0.0,34.0,40.0,0.07058176484967002,5.0,280.0,0.009166462967489611,0.0,0.0,0.0,40.0,85.0,0.09930334881447081,149.0,323.0,0.3681862625274994,18.0,41.0,0.03923246150085555,23.0,69.0,0.030890980200439996,21.0,105.0,0.05774871669518456,0.0,0.0,0.0,17.0,160.0,0.02869102908824249,23.0,23.0,0.04995722317281838,"mckeown, k"
+350,0.0,0.0,0.0,4.0,17.0,0.006558721049395367,2.0,0.0,0.01291248206599713,0.0,0.0,0.0,0.0,0.0,0.0,64.0,16.0,0.31543349046935854,0.0,0.0,0.0,7.0,0.0,0.022955523672883785,0.0,13.0,0.0,7.0,6.0,0.05164992826398852,0.0,16.0,0.0,0.0,0.0,0.0,13.0,9.0,0.04591104734576757,24.0,5.0,0.10903873744619798,12.0,1.0,0.04980528796884607,2.0,1.0,0.007173601147776183,80.0,25.0,0.37733142037302725,0.0,0.0,0.0,1.0,0.0,0.0012297601967616314,0.0,2.0,0.0,"power, r"
+351,0.0,0.0,0.0,1.0,248.0,0.0012186629526462396,0.0,43.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,17.0,438.0,0.11211699164345404,0.0,0.0,0.0,7.0,139.0,0.017061281337047353,0.0,242.0,0.0,11.0,83.0,0.03168523676880223,1.0,271.0,0.0010445682451253482,0.0,0.0,0.0,48.0,133.0,0.08896239554317549,6.0,159.0,0.02924791086350975,5.0,86.0,0.006789693593314764,246.0,617.0,0.6485027855153204,11.0,172.0,0.060933147632311974,0.0,0.0,0.0,0.0,102.0,0.0,1.0,256.0,0.002437325905292479,"chen, l"
+352,0.0,0.0,0.0,4.0,76.0,0.006206238064926798,31.0,219.0,0.06444939528962444,0.0,0.0,0.0,0.0,0.0,0.0,37.0,129.0,0.07622533418204964,0.0,0.0,0.0,0.0,90.0,0.0,4.0,75.0,0.008593252705283258,13.0,4.0,0.0362826225334182,5.0,40.0,0.014322087842138764,0.0,0.0,0.0,47.0,39.0,0.08911521323997453,104.0,43.0,0.2784850413749204,6.0,29.0,0.009548058561425841,45.0,33.0,0.09404837683004454,24.0,54.0,0.04774029280712921,0.0,0.0,0.0,49.0,20.0,0.11553150859325269,74.0,124.0,0.15945257797581155,"barzilay, r"
+353,0.0,0.0,0.0,0.0,19.0,0.0,0.0,85.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,18.0,2.0,0.1506352087114338,0.0,0.0,0.0,6.0,1.0,0.055535390199637026,0.0,0.0,0.0,3.0,1.0,0.026134301270417427,1.0,0.0,0.008711433756805808,0.0,0.0,0.0,21.0,0.0,0.17132486388384757,47.0,0.0,0.2969147005444646,20.0,10.0,0.24827586206896554,0.0,9.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,14.0,0.0032667876588021783,5.0,2.0,0.039201451905626135,"klavans, j"
+354,0.0,0.0,0.0,1.0,80.0,0.006622516556291391,13.0,19.0,0.11479028697571743,0.0,0.0,0.0,0.0,0.0,0.0,4.0,77.0,0.033112582781456956,0.0,0.0,0.0,1.0,11.0,0.008830022075055188,0.0,42.0,0.0,12.0,11.0,0.09713024282560706,0.0,66.0,0.0,0.0,0.0,0.0,9.0,19.0,0.05165562913907285,7.0,24.0,0.03708609271523179,9.0,7.0,0.059602649006622516,35.0,42.0,0.3200883002207505,15.0,44.0,0.09668874172185431,0.0,0.0,0.0,1.0,54.0,0.013245033112582781,20.0,16.0,0.16114790286975716,"yuan, c"
+355,0.0,0.0,0.0,27.0,94.0,0.26193317422434365,1.0,24.0,0.0059665871121718375,0.0,0.0,0.0,0.0,0.0,0.0,3.0,71.0,0.03579952267303103,0.0,0.0,0.0,0.0,56.0,0.0,2.0,46.0,0.017899761336515514,18.0,26.0,0.14856801909307876,0.0,363.0,0.0,0.0,0.0,0.0,8.0,95.0,0.08651551312649164,1.0,33.0,0.007159904534606206,1.0,20.0,0.008949880668257757,32.0,10.0,0.2899761336515513,10.0,13.0,0.11933174224343675,0.0,0.0,0.0,2.0,3.0,0.017899761336515514,0.0,18.0,0.0,"mukherjee, a"
+356,0.0,0.0,0.0,2.0,283.0,0.010319251854240567,37.0,21.0,0.2386326991293131,0.0,0.0,0.0,0.0,0.0,0.0,41.0,122.0,0.22057400838439212,0.0,0.0,0.0,7.0,96.0,0.04514672686230247,0.0,90.0,0.0,0.0,18.0,0.0,0.0,168.0,0.0,0.0,0.0,0.0,8.0,30.0,0.05159625927120283,3.0,13.0,0.017736214124475975,4.0,20.0,0.023218316672041276,39.0,68.0,0.2112221863914866,1.0,45.0,0.006449532408900354,0.0,0.0,0.0,36.0,135.0,0.14801676878426312,7.0,57.0,0.02708803611738149,"xu, b"
+357,0.0,0.0,0.0,9.0,807.0,0.024289531212047605,0.0,55.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,31.0,474.0,0.07469030847704639,0.0,0.0,0.0,21.0,296.0,0.043113917901384496,0.0,516.0,0.0,1.0,33.0,0.0024289531212047604,0.0,1058.0,0.0,0.0,0.0,0.0,103.0,203.0,0.22479961136750054,2.0,228.0,0.004857906242409521,23.0,131.0,0.04809327179985425,119.0,421.0,0.2783580276900656,57.0,191.0,0.13845032790867132,0.0,0.0,0.0,42.0,202.0,0.12387660918144279,15.0,184.0,0.037041535098372595,"wang, j"
+358,0.0,0.0,0.0,0.0,154.0,0.0,58.0,58.0,0.210738255033557,0.0,0.0,0.0,0.0,0.0,0.0,3.0,122.0,0.0116331096196868,0.0,0.0,0.0,0.0,36.0,0.0,3.0,92.0,0.01006711409395973,4.0,22.0,0.010738255033557045,0.0,96.0,0.0,0.0,0.0,0.0,15.0,185.0,0.04026845637583892,1.0,82.0,0.002684563758389261,4.0,44.0,0.016107382550335565,7.0,82.0,0.01879194630872483,135.0,345.0,0.49328859060402674,0.0,0.0,0.0,29.0,87.0,0.08165548098434004,31.0,92.0,0.10402684563758387,"zhu, q"
+359,0.0,0.0,0.0,1.0,0.0,0.006048387096774194,10.0,41.0,0.11290322580645161,0.0,0.0,0.0,0.0,0.0,0.0,27.0,6.0,0.14112903225806453,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,40.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,3.0,2.0,0.012096774193548388,2.0,5.0,0.008064516129032258,6.0,18.0,0.048387096774193554,4.0,0.0,0.048387096774193554,59.0,89.0,0.622983870967742,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,"danlos, l"
+360,0.0,0.0,0.0,0.0,158.0,0.0,411.0,1174.0,0.7920686225758329,0.0,0.0,0.0,0.0,0.0,0.0,33.0,39.0,0.05350571854798608,0.0,0.0,0.0,0.0,9.0,0.0,2.0,62.0,0.002320570197248467,5.0,143.0,0.005801425493121167,1.0,36.0,0.0011602850986242334,0.0,0.0,0.0,2.0,42.0,0.002320570197248467,1.0,23.0,0.00174042764793635,18.0,132.0,0.01874689209348583,4.0,54.0,0.0060914967677772255,21.0,64.0,0.011486822476379912,0.0,0.0,0.0,10.0,228.0,0.011602850986242334,33.0,52.0,0.09315431791811703,"nivre, j"
+361,0.0,0.0,0.0,0.0,98.0,0.0,32.0,149.0,0.18601583113456469,0.0,0.0,0.0,0.0,0.0,0.0,2.0,30.0,0.010554089709762534,0.0,0.0,0.0,1.0,71.0,0.0079155672823219,0.0,45.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,17.0,0.0,1.0,16.0,0.005277044854881267,19.0,23.0,0.11873350923482852,95.0,60.0,0.4551451187335093,0.0,19.0,0.0,0.0,0.0,0.0,39.0,52.0,0.15831134564643798,12.0,9.0,0.05804749340369394,"jiang, w"
+362,0.0,0.0,0.0,4.0,531.0,0.04920049200492005,1.0,140.0,0.012300123001230012,0.0,0.0,0.0,0.0,0.0,0.0,5.0,531.0,0.06150061500615006,0.0,0.0,0.0,0.0,320.0,0.0,9.0,727.0,0.04428044280442805,1.0,48.0,0.012300123001230012,0.0,146.0,0.0,0.0,0.0,0.0,14.0,129.0,0.17220172201722017,6.0,218.0,0.07380073800738007,1.0,48.0,0.006150061500615006,62.0,188.0,0.41943419434194346,3.0,261.0,0.03690036900369004,0.0,0.0,0.0,2.0,419.0,0.024600246002460024,15.0,185.0,0.08733087330873308,"zhou, j"
+363,0.0,0.0,0.0,0.0,73.0,0.0,2.0,23.0,0.009569377990430623,0.0,0.0,0.0,0.0,0.0,0.0,27.0,92.0,0.34928229665071775,0.0,0.0,0.0,0.0,60.0,0.0,0.0,121.0,0.0,0.0,2.0,0.0,1.0,122.0,0.014354066985645935,0.0,0.0,0.0,0.0,56.0,0.0,1.0,120.0,0.004784688995215312,6.0,34.0,0.0861244019138756,60.0,71.0,0.3397129186602871,12.0,62.0,0.0861244019138756,0.0,0.0,0.0,0.0,151.0,0.0,16.0,40.0,0.11004784688995216,"wang, k"
+364,0.0,0.0,0.0,0.0,156.0,0.0,0.0,63.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,12.0,36.0,0.06646971935007386,0.0,0.0,0.0,5.0,45.0,0.029542097488921712,0.0,139.0,0.0,43.0,52.0,0.37518463810930575,1.0,75.0,0.008862629246676515,0.0,0.0,0.0,53.0,34.0,0.2998522895125554,24.0,73.0,0.10782865583456425,2.0,41.0,0.01772525849335303,3.0,105.0,0.019202363367799114,9.0,74.0,0.07533234859675038,0.0,0.0,0.0,0.0,99.0,0.0,0.0,70.0,0.0,"liu, m"
+365,0.0,0.0,0.0,12.0,326.0,0.03432857348911005,134.0,133.0,0.16442377037357564,0.0,0.0,0.0,0.0,0.0,0.0,8.0,163.0,0.024231934227607093,0.0,0.0,0.0,0.0,126.0,0.0,3.0,117.0,0.0022717438338381653,34.0,27.0,0.04205250252415981,2.0,401.0,0.002524159815375739,0.0,0.0,0.0,42.0,173.0,0.10540891389009086,4.0,54.0,0.00242319342276071,1.0,57.0,0.0006057983556901775,72.0,138.0,0.14433145824318475,264.0,687.0,0.22064041540458673,0.0,0.0,0.0,15.0,283.0,0.011358719169190826,141.0,166.0,0.2453988172508293,"zhou, g"
+366,0.0,0.0,0.0,6.0,344.0,0.02457897132453345,0.0,44.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,111.0,179.0,0.2216659080564406,0.0,0.0,0.0,23.0,222.0,0.055985434683659525,16.0,478.0,0.0436959490213928,10.0,44.0,0.023213472917614924,0.0,289.0,0.0,0.0,0.0,0.0,10.0,143.0,0.029358215748748286,5.0,101.0,0.019116977696859352,15.0,53.0,0.051206190259444694,88.0,348.0,0.2744651797906235,45.0,137.0,0.14610832954028216,0.0,0.0,0.0,30.0,249.0,0.0819299044151115,14.0,121.0,0.028675466545289026,"wang, c"
+367,0.0,0.0,0.0,6.0,0.0,0.014073494917904614,2.0,2.0,0.004691164972634871,0.0,0.0,0.0,0.0,0.0,0.0,74.0,98.0,0.5136825645035185,0.0,0.0,0.0,1.0,3.0,0.009382329945269743,0.0,0.0,0.0,0.0,0.0,0.0,8.0,4.0,0.07193119624706802,0.0,0.0,0.0,0.0,2.0,0.0,32.0,12.0,0.1970289288506646,5.0,22.0,0.023455824863174355,6.0,0.0,0.03752931978107897,16.0,12.0,0.12822517591868648,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,"mellish, c"
+368,0.0,0.0,0.0,0.0,18.0,0.0,73.0,67.0,0.5372596153846153,0.0,0.0,0.0,0.0,0.0,0.0,6.0,5.0,0.07932692307692307,0.0,0.0,0.0,2.0,7.0,0.01442307692307692,0.0,7.0,0.0,5.0,174.0,0.02884615384615384,0.0,2.0,0.0,0.0,0.0,0.0,6.0,2.0,0.07932692307692307,0.0,14.0,0.0,22.0,132.0,0.18749999999999997,0.0,25.0,0.0,13.0,41.0,0.06490384615384615,0.0,0.0,0.0,2.0,5.0,0.008413461538461536,0.0,8.0,0.0,"hinrichs, e"
+369,0.0,0.0,0.0,0.0,38.0,0.0,32.0,48.0,0.2641968378707082,0.0,0.0,0.0,0.0,0.0,0.0,11.0,12.0,0.027598465712414626,0.0,0.0,0.0,2.0,42.0,0.004490597810833568,0.0,50.0,0.0,2.0,6.0,0.0028066236317709796,0.0,63.0,0.0,0.0,0.0,0.0,3.0,27.0,0.008794087379549068,11.0,14.0,0.09907381420151556,6.0,11.0,0.017775283001216204,17.0,52.0,0.07503040508934418,62.0,19.0,0.358031621292918,0.0,0.0,0.0,8.0,39.0,0.022452989054167836,22.0,2.0,0.11974927495556179,"luo, x"
+370,0.0,0.0,0.0,2.0,82.0,0.006232147494157362,1.0,25.0,0.015580368735393403,0.0,0.0,0.0,0.0,0.0,0.0,2.0,78.0,0.010386912490262268,0.0,0.0,0.0,2.0,56.0,0.031160737470786806,0.0,71.0,0.0,29.0,33.0,0.100753051155544,1.0,153.0,0.0022257669621990575,0.0,0.0,0.0,24.0,31.0,0.12167526060021515,2.0,21.0,0.006232147494157362,4.0,21.0,0.015728753199540008,24.0,169.0,0.09633861334718255,52.0,10.0,0.5816670994546871,0.0,0.0,0.0,5.0,37.0,0.01201914159587491,0.0,32.0,0.0,"yang, l"
+371,0.0,0.0,0.0,7.0,369.0,0.04587155963302752,13.0,43.0,0.15095913261050875,0.0,0.0,0.0,0.0,0.0,0.0,2.0,178.0,0.0250208507089241,0.0,0.0,0.0,0.0,101.0,0.0,0.0,146.0,0.0,2.0,25.0,0.013344453711426188,0.0,83.0,0.0,0.0,0.0,0.0,13.0,72.0,0.09507923269391158,0.0,66.0,0.0,2.0,91.0,0.0250208507089241,5.0,196.0,0.041701417848206836,8.0,126.0,0.06672226855713094,0.0,0.0,0.0,63.0,305.0,0.42368640533778146,12.0,128.0,0.11259382819015847,"zhang, b"
+372,0.0,0.0,0.0,4.0,328.0,0.005693463903438852,67.0,41.0,0.07925301753586882,0.0,0.0,0.0,0.0,0.0,0.0,39.0,225.0,0.06612009413193654,0.0,0.0,0.0,56.0,104.0,0.07819023760722689,2.0,162.0,0.0018219084491004327,80.0,58.0,0.11682987929856524,7.0,175.0,0.00835041372504365,0.0,0.0,0.0,151.0,174.0,0.20283914066651482,57.0,379.0,0.07963258179609808,57.0,38.0,0.07409094359675092,62.0,246.0,0.08069536172473998,41.0,133.0,0.053063083580050095,0.0,0.0,0.0,39.0,63.0,0.051924390799362334,68.0,121.0,0.10149548318530326,"li, w"
+373,0.0,0.0,0.0,0.0,292.0,0.0,7.0,13.0,0.06635071090047392,0.0,0.0,0.0,0.0,0.0,0.0,0.0,428.0,0.0,0.0,0.0,0.0,0.0,197.0,0.0,0.0,161.0,0.0,0.0,42.0,0.0,24.0,484.0,0.27014218009478674,0.0,0.0,0.0,11.0,146.0,0.1042654028436019,0.0,48.0,0.0,0.0,28.0,0.0,14.0,37.0,0.13270142180094785,1.0,64.0,0.009478672985781991,0.0,0.0,0.0,1.0,61.0,0.009478672985781991,43.0,175.0,0.4075829383886256,"liu, b"
+374,0.0,0.0,0.0,2.0,420.0,0.009693053311793216,7.0,89.0,0.033925686591276254,0.0,0.0,0.0,0.0,0.0,0.0,21.0,207.0,0.23828756058158318,0.0,0.0,0.0,2.0,191.0,0.024232633279483037,0.0,298.0,0.0,2.0,17.0,0.008077544426494346,0.0,330.0,0.0,0.0,0.0,0.0,0.0,255.0,0.0,0.0,202.0,0.0,9.0,62.0,0.07754442649434573,43.0,240.0,0.1890145395799677,3.0,77.0,0.03231017770597738,0.0,0.0,0.0,0.0,143.0,0.0,48.0,103.0,0.3869143780290792,"sun, x"
+375,0.0,0.0,0.0,1.0,119.0,0.009005145797598627,24.0,33.0,0.17238421955403085,0.0,0.0,0.0,0.0,0.0,0.0,2.0,81.0,0.005145797598627787,0.0,0.0,0.0,0.0,350.0,0.0,0.0,288.0,0.0,0.0,14.0,0.0,0.0,54.0,0.0,0.0,0.0,0.0,0.0,65.0,0.0,0.0,63.0,0.0,0.0,68.0,0.0,4.0,42.0,0.03602058319039451,45.0,76.0,0.3647084048027444,0.0,0.0,0.0,9.0,12.0,0.07654373927958834,45.0,98.0,0.33619210977701547,"riedel, s"
+376,0.0,0.0,0.0,13.0,248.0,0.05383360522022838,14.0,21.0,0.039151712887438815,0.0,0.0,0.0,0.0,0.0,0.0,11.0,158.0,0.027732463295269166,0.0,0.0,0.0,2.0,156.0,0.006851549755301793,11.0,286.0,0.05383360522022838,0.0,22.0,0.0,2.0,51.0,0.004893964110929852,0.0,0.0,0.0,47.0,130.0,0.13295269168026097,0.0,98.0,0.0,2.0,29.0,0.006851549755301793,46.0,42.0,0.18433931484502442,8.0,116.0,0.018597063621533436,0.0,0.0,0.0,70.0,42.0,0.20750407830342574,66.0,47.0,0.26345840130505704,"huang, j"
+377,0.0,0.0,0.0,4.0,56.0,0.0032102728731942215,247.0,319.0,0.492776886035313,0.0,0.0,0.0,0.0,0.0,0.0,11.0,36.0,0.019261637239165328,0.0,0.0,0.0,10.0,23.0,0.021936864633493845,16.0,83.0,0.024077046548956663,1.0,39.0,0.0032102728731942215,7.0,5.0,0.008560727661851257,0.0,0.0,0.0,17.0,35.0,0.021669341894060994,3.0,10.0,0.004280363830925628,17.0,31.0,0.023006955591225255,12.0,64.0,0.013643659711075442,5.0,24.0,0.009630818619582664,0.0,0.0,0.0,67.0,34.0,0.10406634563937935,153.0,253.0,0.25066880684858217,"eisner, j"
+378,0.0,0.0,0.0,3.0,38.0,0.004259346900141979,239.0,154.0,0.42849029815428297,0.0,0.0,0.0,0.0,0.0,0.0,15.0,50.0,0.02754377662091813,0.0,0.0,0.0,11.0,88.0,0.012304779933743492,6.0,79.0,0.005679129200189305,9.0,7.0,0.01381921438712731,1.0,0.0,0.0009465215333648841,0.0,0.0,0.0,9.0,45.0,0.010033128253667771,2.0,24.0,0.0028395646000946525,5.0,26.0,0.007098911500236631,20.0,5.0,0.019167061050638902,17.0,23.0,0.02271651680075722,0.0,0.0,0.0,69.0,132.0,0.08873639375295789,179.0,237.0,0.3563653573118789,"collins, m"
+379,0.0,0.0,0.0,0.0,339.0,0.0,33.0,26.0,0.20052770448548812,0.0,0.0,0.0,0.0,0.0,0.0,3.0,124.0,0.0474934036939314,0.0,0.0,0.0,12.0,110.0,0.05804749340369393,0.0,209.0,0.0,2.0,28.0,0.01187335092348285,0.0,184.0,0.0,0.0,0.0,0.0,2.0,90.0,0.0316622691292876,2.0,117.0,0.021108179419525065,0.0,63.0,0.0,23.0,202.0,0.1662269129287599,77.0,63.0,0.3786279683377308,0.0,0.0,0.0,4.0,646.0,0.01846965699208443,10.0,136.0,0.06596306068601582,"wang, m"
+380,0.0,0.0,0.0,0.0,252.0,0.0,51.0,49.0,0.4083457526080477,0.0,0.0,0.0,0.0,0.0,0.0,3.0,44.0,0.026825633383010434,0.0,0.0,0.0,0.0,167.0,0.0,6.0,63.0,0.03427719821162444,9.0,9.0,0.06557377049180328,0.0,84.0,0.0,0.0,0.0,0.0,16.0,31.0,0.14008941877794337,0.0,42.0,0.0,2.0,12.0,0.01788375558867362,4.0,62.0,0.03576751117734724,1.0,76.0,0.00894187779433681,0.0,0.0,0.0,1.0,84.0,0.01788375558867362,35.0,26.0,0.2444113263785395,"wang, q"
+381,0.0,0.0,0.0,0.0,27.0,0.0,12.0,11.0,0.0681384573453257,0.0,0.0,0.0,0.0,0.0,0.0,3.0,3.0,0.011810665939856455,0.0,0.0,0.0,3.0,0.0,0.009811937857726903,0.0,9.0,0.0,16.0,17.0,0.05655491959662033,0.0,17.0,0.0,0.0,0.0,0.0,16.0,9.0,0.10584173707640593,3.0,0.0,0.012264922322158627,19.0,1.0,0.05323884800581448,2.0,0.0,0.01362769146906514,73.0,219.0,0.48500953938402824,0.0,0.0,0.0,5.0,1.0,0.02834559825565549,19.0,3.0,0.1553556827473426,"zanzotto, f"
+382,0.0,0.0,0.0,1.0,3.0,0.0020711080428028996,59.0,19.0,0.32861580945806007,0.0,0.0,0.0,0.0,0.0,0.0,14.0,18.0,0.06627545736969279,0.0,0.0,0.0,8.0,2.0,0.06213324128408699,0.0,0.0,0.0,2.0,16.0,0.006213324128408699,0.0,23.0,0.0,0.0,0.0,0.0,5.0,5.0,0.01933034173282706,3.0,14.0,0.024853296513634795,16.0,25.0,0.06247842595788747,1.0,45.0,0.002761477390403866,74.0,87.0,0.3769416637901277,0.0,0.0,0.0,6.0,1.0,0.026924404556437694,4.0,0.0,0.021401449775629963,"delmonte, r"
+383,0.0,0.0,0.0,0.0,6.0,0.0,10.0,24.0,0.049664429530201344,0.0,0.0,0.0,0.0,0.0,0.0,21.0,38.0,0.10604026845637585,0.0,0.0,0.0,0.0,9.0,0.0,0.0,15.0,0.0,1.0,7.0,0.004026845637583893,43.0,10.0,0.1731543624161074,0.0,0.0,0.0,22.0,37.0,0.08859060402684564,5.0,1.0,0.020134228187919465,8.0,3.0,0.032214765100671144,61.0,216.0,0.2684563758389262,19.0,15.0,0.09261744966442954,0.0,0.0,0.0,31.0,5.0,0.1261744966442953,8.0,53.0,0.038926174496644296,"yeh, j"
+384,0.0,0.0,0.0,0.0,71.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,222.0,0.0,0.0,0.0,0.0,50.0,31.0,0.359504132231405,5.0,203.0,0.03541912632821724,11.0,18.0,0.12987012987012989,2.0,72.0,0.014167650531286897,0.0,0.0,0.0,37.0,51.0,0.2768595041322314,4.0,72.0,0.023612750885478158,1.0,56.0,0.011806375442739079,8.0,68.0,0.08500590318772137,3.0,23.0,0.03541912632821724,0.0,0.0,0.0,0.0,42.0,0.0,4.0,70.0,0.028335301062573794,"yu, y"
+385,0.0,0.0,0.0,1.0,31.0,0.006514657980456026,10.0,16.0,0.06514657980456026,0.0,0.0,0.0,0.0,0.0,0.0,14.0,19.0,0.13680781758957655,0.0,0.0,0.0,0.0,7.0,0.0,0.0,14.0,0.0,7.0,13.0,0.0749185667752443,0.0,0.0,0.0,0.0,0.0,0.0,46.0,28.0,0.4723127035830619,0.0,1.0,0.0,4.0,8.0,0.03908794788273616,11.0,47.0,0.10749185667752444,0.0,0.0,0.0,0.0,0.0,0.0,3.0,59.0,0.02931596091205212,6.0,0.0,0.06840390879478828,"church, k"
+386,0.0,0.0,0.0,2.0,303.0,0.002770850651149904,6.0,12.0,0.011083402604599614,0.0,0.0,0.0,0.0,0.0,0.0,4.0,129.0,0.00609587143252979,0.0,0.0,0.0,0.0,61.0,0.0,1.0,176.0,0.002770850651149904,48.0,33.0,0.1306917890459038,0.0,98.0,0.0,0.0,0.0,0.0,25.0,195.0,0.05126073704627322,9.0,192.0,0.01570148702318279,3.0,35.0,0.0033250207813798854,175.0,496.0,0.2734829592684955,41.0,200.0,0.09310058187863678,0.0,0.0,0.0,219.0,550.0,0.346633416458853,43.0,99.0,0.06308303315784615,"chen, b"
+387,0.0,0.0,0.0,24.0,170.0,0.07466329205459642,50.0,170.0,0.14679562505649463,0.0,0.0,0.0,0.0,0.0,0.0,23.0,197.0,0.06327397631745459,0.0,0.0,0.0,0.0,27.0,0.0,0.0,53.0,0.0,1.0,4.0,0.0025309590526981832,12.0,78.0,0.03986260507999639,0.0,0.0,0.0,6.0,57.0,0.02277863147428365,6.0,117.0,0.021513151947934562,0.0,13.0,0.0,58.0,114.0,0.13893157371418247,3.0,178.0,0.00759287715809455,0.0,0.0,0.0,177.0,477.0,0.4213142908795083,21.0,87.0,0.060743017264756394,"zong, c"
+388,0.0,0.0,0.0,3.0,56.0,0.013924703455389376,5.0,3.0,0.023207839092315625,0.0,0.0,0.0,0.0,0.0,0.0,28.0,34.0,0.1346054667354306,0.0,0.0,0.0,2.0,26.0,0.0061887570912841664,0.0,15.0,0.0,4.0,54.0,0.015471892728210415,0.0,24.0,0.0,0.0,0.0,0.0,12.0,108.0,0.06291903042805569,82.0,46.0,0.36049510056730266,52.0,9.0,0.2142857142857143,6.0,30.0,0.025528623001547186,11.0,113.0,0.060340381640020625,0.0,0.0,0.0,34.0,207.0,0.08303249097472923,0.0,7.0,0.0,"popescu-belis, a"
+389,0.0,0.0,0.0,0.0,60.0,0.0,101.0,396.0,0.8887343532684283,0.0,0.0,0.0,0.0,0.0,0.0,0.0,92.0,0.0,0.0,0.0,0.0,0.0,34.0,0.0,0.0,41.0,0.0,8.0,18.0,0.03337969401947149,0.0,21.0,0.0,0.0,0.0,0.0,1.0,14.0,0.008344923504867872,0.0,16.0,0.0,5.0,91.0,0.041724617524339355,0.0,3.0,0.0,1.0,42.0,0.008344923504867872,0.0,0.0,0.0,3.0,8.0,0.01947148817802503,0.0,7.0,0.0,"tsarfaty, r"
+390,0.0,0.0,0.0,10.0,24.0,0.08267879288962382,7.0,34.0,0.057875155022736664,0.0,0.0,0.0,0.0,0.0,0.0,39.0,103.0,0.27904092600248037,0.0,0.0,0.0,0.0,35.0,0.0,0.0,6.0,0.0,1.0,5.0,0.00826787928896238,14.0,119.0,0.11575031004547333,0.0,0.0,0.0,13.0,21.0,0.07110376188507649,0.0,156.0,0.0,1.0,27.0,0.00496072757337743,19.0,27.0,0.15130219098801156,8.0,90.0,0.06283588259611411,0.0,0.0,0.0,8.0,8.0,0.07854485324514263,11.0,2.0,0.08763952046300125,"huang, t"
+391,0.0,0.0,0.0,1.0,305.0,0.00834028356964137,26.0,52.0,0.07645259938837921,0.0,0.0,0.0,0.0,0.0,0.0,4.0,256.0,0.013900472616068948,0.0,0.0,0.0,5.0,76.0,0.013900472616068948,0.0,322.0,0.0,51.0,57.0,0.20989713650264114,14.0,428.0,0.03002502085070893,0.0,0.0,0.0,59.0,113.0,0.19377258826800114,15.0,166.0,0.040589380038921326,7.0,38.0,0.019460661662496527,107.0,255.0,0.3094245204336948,9.0,195.0,0.0556018904642758,0.0,0.0,0.0,3.0,48.0,0.012510425354462054,9.0,144.0,0.01612454823463998,"chen, c"
+392,0.0,0.0,0.0,0.0,360.0,0.0,6.0,107.0,0.0968113714944295,0.0,0.0,0.0,0.0,0.0,0.0,16.0,199.0,0.10142143680368805,0.0,0.0,0.0,1.0,124.0,0.016135228582404917,0.0,227.0,0.0,0.0,54.0,0.0,12.0,258.0,0.08643872454859779,0.0,0.0,0.0,14.0,167.0,0.11294660007683442,2.0,136.0,0.016135228582404917,5.0,68.0,0.035497502881290825,5.0,158.0,0.05378409527468306,7.0,178.0,0.0968113714944295,0.0,0.0,0.0,4.0,161.0,0.04517864003073377,32.0,125.0,0.33883980023050325,"zhang, l"
+393,0.0,0.0,0.0,0.0,286.0,0.0,77.0,490.0,0.49182561307901906,0.0,0.0,0.0,0.0,0.0,0.0,0.0,49.0,0.0,0.0,0.0,0.0,0.0,72.0,0.0,4.0,262.0,0.0326975476839237,34.0,62.0,0.21525885558583105,0.0,22.0,0.0,0.0,0.0,0.0,10.0,111.0,0.04495912806539509,0.0,81.0,0.0,15.0,89.0,0.12261580381471389,6.0,24.0,0.03950953678474114,1.0,102.0,0.008174386920980926,0.0,0.0,0.0,0.0,67.0,0.0,6.0,20.0,0.04495912806539509,"goldberg, y"
+394,0.0,0.0,0.0,6.0,57.0,0.012140107881413219,81.0,538.0,0.15903541324651316,0.0,0.0,0.0,0.0,0.0,0.0,14.0,136.0,0.026252983293556086,0.0,0.0,0.0,1.0,31.0,0.0015175134851766524,0.0,85.0,0.0,29.0,76.0,0.06252155558927808,3.0,133.0,0.004552540455529957,0.0,0.0,0.0,8.0,49.0,0.018665415867672824,6.0,58.0,0.01065708333908149,104.0,1161.0,0.15442079269386236,45.0,230.0,0.06821223115869052,1.0,4.0,0.0015175134851766524,0.0,0.0,0.0,176.0,414.0,0.480506849504049,0.0,16.0,0.0,"habash, n"
+395,0.0,0.0,0.0,0.0,144.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,104.0,0.024291497975708502,0.0,0.0,0.0,7.0,113.0,0.04048582995951417,0.0,41.0,0.0,11.0,8.0,0.06747638326585693,26.0,102.0,0.14035087719298245,0.0,0.0,0.0,20.0,35.0,0.08232118758434549,12.0,48.0,0.05263157894736842,4.0,27.0,0.024291497975708502,51.0,31.0,0.4048582995951417,7.0,66.0,0.048582995951417005,0.0,0.0,0.0,1.0,53.0,0.005398110661268556,15.0,47.0,0.10931174089068826,"li, t"
+396,0.0,0.0,0.0,0.0,6.0,0.0,52.0,24.0,0.2736902927580893,0.0,0.0,0.0,0.0,0.0,0.0,11.0,15.0,0.04179506933744222,0.0,0.0,0.0,0.0,9.0,0.0,1.0,23.0,0.001155624036979969,68.0,0.0,0.2514445300462249,0.0,33.0,0.0,0.0,0.0,0.0,17.0,0.0,0.07145608628659474,12.0,0.0,0.048536209553158696,16.0,24.0,0.04969183359013867,30.0,1.0,0.0694337442218798,31.0,77.0,0.1456086286594761,0.0,0.0,0.0,7.0,15.0,0.02696456086286594,5.0,3.0,0.020223420647149458,"brew, c"
+397,0.0,0.0,0.0,0.0,230.0,0.0,10.0,109.0,0.0392156862745098,0.0,0.0,0.0,0.0,0.0,0.0,21.0,89.0,0.07745098039215687,0.0,0.0,0.0,0.0,90.0,0.0,0.0,132.0,0.0,38.0,7.0,0.38529411764705884,5.0,126.0,0.058823529411764705,0.0,0.0,0.0,56.0,122.0,0.20588235294117646,4.0,58.0,0.01568627450980392,0.0,69.0,0.0,3.0,47.0,0.008823529411764706,8.0,179.0,0.047058823529411764,0.0,0.0,0.0,27.0,49.0,0.08137254901960785,22.0,96.0,0.08039215686274509,"lu, w"
+398,0.0,0.0,0.0,6.0,58.0,0.027039675523893712,105.0,8.0,0.5478914253028964,0.0,0.0,0.0,0.0,0.0,0.0,0.0,78.0,0.0,0.0,0.0,0.0,0.0,42.0,0.0,0.0,17.0,0.0,1.0,0.0,0.002027975664292029,1.0,28.0,0.003379959440486714,0.0,0.0,0.0,4.0,10.0,0.008111902657168116,10.0,9.0,0.029574645104258748,2.0,12.0,0.008449898601216786,20.0,7.0,0.094638864333628,33.0,10.0,0.04621444542665488,0.0,0.0,0.0,51.0,48.0,0.12620248557017316,23.0,29.0,0.1064687223753315,"shen, l"
+399,0.0,0.0,0.0,0.0,23.0,0.0,48.0,128.0,0.4630225080385852,0.0,0.0,0.0,0.0,0.0,0.0,0.0,18.0,0.0,0.0,0.0,0.0,2.0,0.0,0.019292604501607715,0.0,15.0,0.0,32.0,35.0,0.30868167202572344,0.0,42.0,0.0,0.0,0.0,0.0,0.0,11.0,0.0,12.0,8.0,0.09324758842443728,7.0,78.0,0.05787781350482315,0.0,40.0,0.0,4.0,45.0,0.03858520900321543,0.0,0.0,0.0,0.0,13.0,0.0,2.0,51.0,0.019292604501607715,"rehbein, i"
+400,0.0,0.0,0.0,0.0,171.0,0.0,0.0,45.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,79.0,0.0,0.0,0.0,0.0,4.0,15.0,0.053691275167785234,0.0,212.0,0.0,17.0,121.0,0.16778523489932887,0.0,137.0,0.0,0.0,0.0,0.0,24.0,108.0,0.2080536912751678,0.0,120.0,0.0,5.0,27.0,0.03355704697986577,3.0,690.0,0.040268456375838924,31.0,52.0,0.2595078299776286,0.0,0.0,0.0,13.0,485.0,0.116331096196868,17.0,37.0,0.12080536912751678,"komachi, m"
+401,0.0,0.0,0.0,0.0,79.0,0.0,17.0,36.0,0.1845018450184502,0.0,0.0,0.0,0.0,0.0,0.0,0.0,66.0,0.0,0.0,0.0,0.0,0.0,58.0,0.0,10.0,260.0,0.04920049200492006,0.0,49.0,0.0,28.0,542.0,0.20295202952029526,0.0,0.0,0.0,1.0,140.0,0.00820008200082001,2.0,60.0,0.009840098400984012,8.0,95.0,0.07380073800738009,6.0,104.0,0.05740057400574007,0.0,58.0,0.0,0.0,0.0,0.0,3.0,23.0,0.036900369003690044,34.0,62.0,0.37720377203772043,"dredze, m"
+402,0.0,0.0,0.0,1.0,275.0,0.002204828574578326,56.0,35.0,0.13945540734207915,0.0,0.0,0.0,0.0,0.0,0.0,2.0,75.0,0.004409657149156652,0.0,0.0,0.0,5.0,18.0,0.011024142872891631,0.0,121.0,0.0,34.0,17.0,0.09370521441957887,3.0,48.0,0.007716900011024141,0.0,0.0,0.0,34.0,71.0,0.08301179583287399,5.0,28.0,0.00749641715356631,5.0,12.0,0.0101422114430603,10.0,105.0,0.025355528607650755,195.0,291.0,0.4020504905743578,0.0,0.0,0.0,33.0,344.0,0.06945210009921728,68.0,97.0,0.14397530591996477,"su, j"
+403,0.0,0.0,0.0,0.0,5.0,0.0,28.0,19.0,0.3020544142143254,0.0,0.0,0.0,0.0,0.0,0.0,0.0,36.0,0.0,0.0,0.0,0.0,1.0,34.0,0.00832870627429206,1.0,73.0,0.01110494169905608,28.0,2.0,0.20433092726263188,0.0,35.0,0.0,0.0,0.0,0.0,25.0,34.0,0.16657412548584122,0.0,13.0,0.0,12.0,20.0,0.09994447529150473,3.0,2.0,0.019988895058300947,2.0,8.0,0.01665741254858412,0.0,0.0,0.0,24.0,141.0,0.15991116046640758,1.0,16.0,0.01110494169905608,"dandapat, s"
+404,0.0,0.0,0.0,2.0,10.0,0.019212295869356386,7.0,85.0,0.0893371757925072,0.0,0.0,0.0,0.0,0.0,0.0,7.0,31.0,0.06340057636887608,0.0,0.0,0.0,7.0,36.0,0.0643611911623439,0.0,3.0,0.0,13.0,81.0,0.1248799231508165,0.0,31.0,0.0,0.0,0.0,0.0,0.0,31.0,0.0,3.0,1.0,0.04322766570605187,9.0,67.0,0.079731027857829,5.0,19.0,0.048030739673390964,56.0,71.0,0.41690682036503357,0.0,0.0,0.0,8.0,5.0,0.04130643611911623,1.0,9.0,0.009606147934678193,"de marneffe, m"
+405,0.0,0.0,0.0,6.0,104.0,0.045749142203583684,13.0,71.0,0.041174227983225314,0.0,0.0,0.0,0.0,0.0,0.0,28.0,39.0,0.06862371330537552,0.0,0.0,0.0,3.0,25.0,0.011437285550895921,0.0,11.0,0.0,11.0,98.0,0.08006099885627144,0.0,23.0,0.0,0.0,0.0,0.0,0.0,12.0,0.0,0.0,10.0,0.0,13.0,42.0,0.08120472741136103,1.0,19.0,0.0019062142584826533,119.0,495.0,0.6679374761723217,0.0,0.0,0.0,0.0,19.0,0.0,1.0,0.0,0.0019062142584826533,"bos, j"
+406,0.0,0.0,0.0,0.0,301.0,0.0,34.0,122.0,0.2699280575539569,0.0,0.0,0.0,0.0,0.0,0.0,0.0,74.0,0.0,0.0,0.0,0.0,4.0,159.0,0.02129496402877698,2.0,334.0,0.02589928057553957,6.0,46.0,0.05179856115107914,0.0,215.0,0.0,0.0,0.0,0.0,10.0,117.0,0.05352517985611511,11.0,63.0,0.0687769784172662,1.0,91.0,0.0034532374100719426,81.0,358.0,0.35683453237410073,11.0,212.0,0.06330935251798561,0.0,0.0,0.0,1.0,176.0,0.0057553956834532375,17.0,99.0,0.07942446043165467,"wu, s"
+407,0.0,0.0,0.0,3.0,1.0,0.008397658834506744,5.0,38.0,0.021771708089461928,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0063982162548622806,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,17.0,0.036078830548251194,0.0,7.0,0.0,10.0,0.0,0.027992196115022478,1.0,1.0,0.00746458563067266,0.0,0.0,0.0,196.0,102.0,0.8918968045272224,0.0,8.0,0.0,"khalilov, m"
+408,0.0,0.0,0.0,6.0,50.0,0.04399057344854674,62.0,36.0,0.5517151086671903,0.0,0.0,0.0,0.0,0.0,0.0,4.0,59.0,0.012568735271013353,0.0,0.0,0.0,3.0,21.0,0.009426551453260015,0.0,65.0,0.0,2.0,4.0,0.014663524482848913,0.0,53.0,0.0,0.0,0.0,0.0,0.0,32.0,0.0,4.0,16.0,0.012568735271013353,8.0,26.0,0.025137470542026707,19.0,1.0,0.12280701754385964,8.0,10.0,0.0788164440953129,0.0,0.0,0.0,6.0,24.0,0.032992930086410056,13.0,10.0,0.09531290913851793,"kim, m"
+409,0.0,0.0,0.0,4.0,2.0,0.01105634127240061,60.0,43.0,0.090723422551865,0.0,0.0,0.0,0.0,0.0,0.0,7.0,27.0,0.015356029545000845,0.0,0.0,0.0,5.0,8.0,0.011517022158750636,0.0,9.0,0.0,244.0,431.0,0.5133367116598333,0.0,28.0,0.0,0.0,0.0,0.0,37.0,42.0,0.02837794259916156,11.0,11.0,0.025798129635601418,29.0,210.0,0.06818077117980374,10.0,58.0,0.025798129635601418,69.0,50.0,0.10500453002871578,0.0,0.0,0.0,51.0,96.0,0.10300824618786567,1.0,3.0,0.0018427235454001015,"bond, f"
+410,0.0,0.0,0.0,10.0,24.0,0.017511800027414055,91.0,12.0,0.16627440808292404,0.0,0.0,0.0,0.0,0.0,0.0,8.0,68.0,0.01703850813478124,0.0,0.0,0.0,1.0,6.0,0.0009465837852656244,1.0,3.0,0.0014198756778984367,2.0,0.0,0.0014198756778984367,0.0,1.0,0.0,0.0,0.0,0.0,6.0,11.0,0.008708570824443745,19.0,37.0,0.019266053354133596,9.0,12.0,0.01254899646752142,38.0,31.0,0.06099441699966132,0.0,0.0,0.0,0.0,0.0,0.0,350.0,295.0,0.62855662978473,29.0,53.0,0.06531428118332809,"casacuberta, f"
+411,0.0,0.0,0.0,0.0,36.0,0.0,160.0,191.0,0.4646341463414634,0.0,0.0,0.0,0.0,0.0,0.0,0.0,43.0,0.0,0.0,0.0,0.0,13.0,3.0,0.03170731707317073,0.0,12.0,0.0,24.0,21.0,0.06341463414634146,0.0,0.0,0.0,0.0,0.0,0.0,11.0,12.0,0.026829268292682926,8.0,47.0,0.05853658536585366,34.0,15.0,0.09268292682926829,8.0,1.0,0.01951219512195122,3.0,25.0,0.01829268292682927,0.0,0.0,0.0,48.0,301.0,0.1402439024390244,21.0,24.0,0.08414634146341464,"sima{'}an, k"
+412,0.0,0.0,0.0,1.0,76.0,0.0026562104730584366,29.0,6.0,0.14533265874019732,0.0,0.0,0.0,0.0,0.0,0.0,16.0,211.0,0.06994687579053882,0.0,0.0,0.0,1.0,35.0,0.0026562104730584366,0.0,20.0,0.0,8.0,0.0,0.023905894257525928,0.0,15.0,0.0,0.0,0.0,0.0,0.0,31.0,0.0,16.0,117.0,0.08234252466481154,1.0,11.0,0.0035416139640779155,25.0,36.0,0.08057171768277258,32.0,7.0,0.09739438401214266,0.0,0.0,0.0,67.0,134.0,0.26764482671388823,31.0,49.0,0.22400708322792814,"galley, m"
+413,0.0,0.0,0.0,0.0,23.0,0.0,15.0,15.0,0.09039775010044196,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,13.0,1.0,0.07834471675371636,21.0,2.0,0.0980313378867015,0.0,23.0,0.0,0.0,0.0,0.0,26.0,8.0,0.12856568903173968,0.0,12.0,0.0,1.0,12.0,0.00803535556448373,3.0,24.0,0.014463640016070716,0.0,0.0,0.0,0.0,0.0,0.0,82.0,239.0,0.4664523905182805,16.0,11.0,0.1157091201285657,"haque, r"
+414,0.0,0.0,0.0,0.0,147.0,0.0,1.0,19.0,0.0035555555555555557,0.0,0.0,0.0,0.0,0.0,0.0,10.0,114.0,0.026074074074074072,0.0,0.0,0.0,23.0,184.0,0.05096296296296296,1.0,138.0,0.0035555555555555557,15.0,16.0,0.050666666666666665,48.0,327.0,0.14399999999999996,0.0,0.0,0.0,23.0,64.0,0.064,24.0,207.0,0.057481481481481474,14.0,64.0,0.042666666666666665,2.0,22.0,0.005333333333333333,105.0,179.0,0.3300740740740741,0.0,0.0,0.0,6.0,12.0,0.008888888888888889,67.0,79.0,0.21274074074074076,"cardie, c"
+415,0.0,0.0,0.0,1.0,1.0,0.0037055335968379445,5.0,8.0,0.018527667984189724,0.0,0.0,0.0,0.0,0.0,0.0,9.0,10.0,0.018280632411067196,0.0,0.0,0.0,15.0,4.0,0.03977272727272727,0.0,0.0,0.0,89.0,34.0,0.2660573122529644,111.0,126.0,0.28631422924901184,0.0,0.0,0.0,34.0,66.0,0.0779397233201581,5.0,13.0,0.013216403162055334,41.0,38.0,0.10202569169960475,4.0,4.0,0.007411067193675889,50.0,50.0,0.14760375494071143,0.0,0.0,0.0,5.0,3.0,0.011734189723320158,3.0,5.0,0.007411067193675889,"wiebe, j"
+416,0.0,0.0,0.0,6.0,61.0,0.026086956521739132,75.0,259.0,0.47391304347826085,0.0,0.0,0.0,0.0,0.0,0.0,8.0,30.0,0.06782608695652174,0.0,0.0,0.0,0.0,7.0,0.0,2.0,6.0,0.020869565217391306,5.0,29.0,0.013043478260869566,1.0,6.0,0.010434782608695653,0.0,0.0,0.0,5.0,18.0,0.017391304347826087,2.0,29.0,0.013043478260869566,4.0,39.0,0.026086956521739132,36.0,28.0,0.16521739130434782,21.0,130.0,0.12956521739130436,0.0,0.0,0.0,4.0,20.0,0.026086956521739132,2.0,36.0,0.010434782608695653,"schuler, w"
+417,0.0,0.0,0.0,7.0,228.0,0.025478293010861165,4.0,12.0,0.007042780181864061,0.0,0.0,0.0,0.0,0.0,0.0,0.0,46.0,0.0,0.0,0.0,0.0,2.0,26.0,0.007954198793634706,0.0,89.0,0.0,4.0,20.0,0.020382634408688934,3.0,23.0,0.022371184107097607,0.0,0.0,0.0,3.0,11.0,0.022371184107097607,6.0,49.0,0.01657124748673897,1.0,35.0,0.0029828245476130146,12.0,55.0,0.03355677616064641,0.0,23.0,0.0,0.0,0.0,0.0,220.0,391.0,0.8391582882331768,1.0,14.0,0.0021305889625807244,"costa-juss{\`a}, m"
+418,0.0,0.0,0.0,0.0,35.0,0.0,71.0,322.0,0.3676100628930817,0.0,0.0,0.0,0.0,0.0,0.0,46.0,173.0,0.17083333333333334,0.0,0.0,0.0,5.0,6.0,0.0330188679245283,0.0,2.0,0.0,0.0,7.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,2.0,39.0,0.008805031446540879,9.0,10.0,0.03396226415094339,0.0,4.0,0.0,67.0,152.0,0.3643081761006289,0.0,0.0,0.0,1.0,4.0,0.001650943396226415,4.0,13.0,0.01981132075471698,"koller, a"
+419,0.0,0.0,0.0,3.0,86.0,0.011514104778353481,52.0,4.0,0.15927844943388983,0.0,0.0,0.0,0.0,0.0,0.0,3.0,31.0,0.00863557858376511,0.0,0.0,0.0,15.0,45.0,0.05411629245826137,0.0,44.0,0.0,45.0,48.0,0.17846862406447897,6.0,134.0,0.01727115716753022,0.0,0.0,0.0,29.0,63.0,0.10976779888696986,36.0,21.0,0.1111111111111111,37.0,31.0,0.1065054691997697,63.0,79.0,0.18288236422951445,9.0,63.0,0.024947227019765875,0.0,0.0,0.0,6.0,11.0,0.021109192093648046,5.0,22.0,0.014392630972941852,"lu, q"
+420,0.0,0.0,0.0,0.0,14.0,0.0,4.0,27.0,0.026292725679228742,0.0,0.0,0.0,0.0,0.0,0.0,0.0,13.0,0.0,0.0,0.0,0.0,2.0,5.0,0.013146362839614371,0.0,5.0,0.0,5.0,16.0,0.03680981595092024,0.0,27.0,0.0,0.0,0.0,0.0,20.0,11.0,0.1380368098159509,10.0,8.0,0.06573181419807186,1.0,10.0,0.0065731814198071855,13.0,34.0,0.08545135845749341,0.0,12.0,0.0,0.0,0.0,0.0,48.0,162.0,0.41761612620508326,32.0,11.0,0.21034180543382994,"lu, b"
+421,0.0,0.0,0.0,1.0,15.0,0.0015797788309636653,4.0,14.0,0.04054765666140074,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0036861506055818854,0.0,0.0,0.0,1.0,2.0,0.0036861506055818854,0.0,0.0,0.0,15.0,1.0,0.11058451816745657,0.0,0.0,0.0,0.0,0.0,0.0,6.0,1.0,0.03317535545023697,3.0,0.0,0.018430753027909426,9.0,21.0,0.08056872037914693,3.0,0.0,0.016587677725118485,0.0,0.0,0.0,0.0,0.0,0.0,91.0,175.0,0.6800947867298578,2.0,1.0,0.011058451816745658,"hoang, h"
+422,0.0,0.0,0.0,0.0,0.0,0.0,54.0,0.0,0.21511572073369828,0.0,0.0,0.0,0.0,0.0,0.0,4.0,5.0,0.009219245174301356,0.0,0.0,0.0,8.0,3.0,0.04929735266813919,6.0,3.0,0.04033419763756843,22.0,0.0,0.11405614776401296,0.0,2.0,0.0,0.0,0.0,0.0,44.0,23.0,0.22116585037933353,14.0,36.0,0.05883671052210379,11.0,4.0,0.0282339383462979,20.0,23.0,0.0817887896539582,2.0,1.0,0.013444732545856142,0.0,0.0,0.0,2.0,0.0,0.011203943788213452,24.0,21.0,0.15730337078651688,"nakagawa, h"
+423,0.0,0.0,0.0,0.0,70.0,0.0,35.0,134.0,0.22265624999999997,0.0,0.0,0.0,0.0,0.0,0.0,5.0,90.0,0.029296874999999997,0.0,0.0,0.0,0.0,46.0,0.0,0.0,81.0,0.0,59.0,195.0,0.4726562499999999,8.0,402.0,0.09374999999999999,0.0,0.0,0.0,0.0,105.0,0.0,8.0,129.0,0.06835937499999999,1.0,90.0,0.011718749999999998,5.0,50.0,0.04296874999999999,0.0,312.0,0.0,0.0,0.0,0.0,0.0,79.0,0.0,7.0,129.0,0.05859374999999999,"das, d"
+424,0.0,0.0,0.0,0.0,229.0,0.0,6.0,8.0,0.05120910384068279,0.0,0.0,0.0,0.0,0.0,0.0,2.0,329.0,0.017069701280227598,0.0,0.0,0.0,1.0,162.0,0.008534850640113799,0.0,43.0,0.0,24.0,97.0,0.09359886201991464,0.0,933.0,0.0,0.0,0.0,0.0,27.0,87.0,0.14224751066856328,3.0,75.0,0.012802275960170698,14.0,77.0,0.11664295874822192,13.0,21.0,0.07283072546230442,0.0,73.0,0.0,0.0,0.0,0.0,18.0,361.0,0.08307254623044097,59.0,147.0,0.4019914651493599,"ekbal, a"
+425,0.0,0.0,0.0,0.0,21.0,0.0,180.0,75.0,0.7382557086812191,0.0,0.0,0.0,0.0,0.0,0.0,4.0,92.0,0.012071220199175134,0.0,0.0,0.0,1.0,7.0,0.004526707574690675,4.0,16.0,0.004023740066391711,9.0,6.0,0.07544512624484459,0.0,6.0,0.0,0.0,0.0,0.0,0.0,27.0,0.0,2.0,7.0,0.004023740066391711,11.0,8.0,0.02534956241826778,2.0,1.0,0.006035610099587567,31.0,44.0,0.11437481138718439,0.0,0.0,0.0,8.0,1.0,0.01589377326224726,0.0,30.0,0.0,"hockenmaier, j"
+426,0.0,0.0,0.0,2.0,317.0,0.004427325452694026,39.0,31.0,0.07632709080444501,0.0,0.0,0.0,0.0,0.0,0.0,25.0,670.0,0.03231947580466639,0.0,0.0,0.0,2.0,285.0,0.0026563952716164158,14.0,320.0,0.01878450942071608,16.0,5.0,0.024793022535086546,6.0,85.0,0.00841191836011865,0.0,0.0,0.0,115.0,115.0,0.179395227343162,7.0,333.0,0.009474476468765215,13.0,65.0,0.015558886590896149,208.0,121.0,0.3011340277909542,7.0,58.0,0.009651569486872976,0.0,0.0,0.0,65.0,162.0,0.08850856055000031,124.0,256.0,0.22855751412000577,"gao, j"
+427,0.0,0.0,0.0,0.0,52.0,0.0,0.0,97.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,20.0,0.0,0.0,0.0,0.0,0.0,35.0,0.0,0.0,68.0,0.0,0.0,22.0,0.0,0.0,29.0,0.0,0.0,0.0,0.0,63.0,14.0,0.542649727767695,0.0,15.0,0.0,0.0,41.0,0.0,40.0,35.0,0.3030852994555354,14.0,116.0,0.12704174228675136,0.0,0.0,0.0,3.0,59.0,0.02722323049001815,0.0,20.0,0.0,"zhu, m"
+428,0.0,0.0,0.0,0.0,5.0,0.0,21.0,0.0,0.06884139482564679,0.0,0.0,0.0,0.0,0.0,0.0,6.0,3.0,0.0202474690663667,0.0,0.0,0.0,0.0,10.0,0.0,2.0,7.0,0.0033745781777277835,70.0,69.0,0.2321709786276715,14.0,22.0,0.03914510686164229,0.0,0.0,0.0,13.0,10.0,0.029246344206974122,19.0,8.0,0.05354330708661416,12.0,7.0,0.02913385826771653,121.0,29.0,0.2760404949381327,20.0,3.0,0.06344206974128233,0.0,0.0,0.0,61.0,88.0,0.13689538807649043,31.0,3.0,0.04791901012373453,"tsou, b"
+429,0.0,0.0,0.0,0.0,45.0,0.0,113.0,156.0,0.44496181784317373,0.0,0.0,0.0,0.0,0.0,0.0,7.0,22.0,0.02793816353138387,0.0,0.0,0.0,0.0,0.0,0.0,0.0,24.0,0.0,27.0,17.0,0.11659526913764202,2.0,12.0,0.007450176941702365,0.0,0.0,0.0,2.0,18.0,0.00651890482398957,1.0,6.0,0.005587632706276774,52.0,113.0,0.14881728441050474,1.0,5.0,0.005587632706276774,25.0,215.0,0.0806481653939281,0.0,0.0,0.0,33.0,2.0,0.11864406779661017,7.0,10.0,0.037250884708511824,"oepen, s"
+430,0.0,0.0,0.0,0.0,305.0,0.0,0.0,315.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,95.0,0.0,0.0,0.0,0.0,2.0,65.0,0.02127659574468085,6.0,131.0,0.06382978723404255,7.0,55.0,0.07446808510638298,31.0,182.0,0.32978723404255317,0.0,0.0,0.0,29.0,84.0,0.16666666666666666,26.0,561.0,0.1790780141843972,0.0,41.0,0.0,15.0,146.0,0.1595744680851064,1.0,136.0,0.005319148936170213,0.0,0.0,0.0,0.0,88.0,0.0,0.0,45.0,0.0,"wan, x"
+431,0.0,0.0,0.0,0.0,236.0,0.0,63.0,76.0,0.12596153846153846,0.0,0.0,0.0,0.0,0.0,0.0,34.0,122.0,0.0658653846153846,0.0,0.0,0.0,11.0,87.0,0.030769230769230767,0.0,154.0,0.0,58.0,34.0,0.11923076923076924,1.0,147.0,0.001923076923076923,0.0,0.0,0.0,14.0,70.0,0.025480769230769227,21.0,29.0,0.04951923076923076,9.0,72.0,0.017307692307692305,132.0,222.0,0.41778846153846144,18.0,87.0,0.04134615384615384,0.0,0.0,0.0,13.0,97.0,0.024038461538461533,42.0,60.0,0.08076923076923076,"yang, j"
+432,0.0,0.0,0.0,0.0,350.0,0.0,0.0,87.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,47.0,224.0,0.16725978647686834,0.0,0.0,0.0,0.0,156.0,0.0,0.0,243.0,0.0,0.0,26.0,0.0,0.0,201.0,0.0,0.0,0.0,0.0,1.0,168.0,0.005338078291814947,0.0,146.0,0.0,0.0,29.0,0.0,68.0,251.0,0.5791814946619217,2.0,70.0,0.010676156583629894,0.0,0.0,0.0,7.0,400.0,0.045373665480427046,24.0,115.0,0.19217081850533807,"liu, s"
+433,0.0,0.0,0.0,3.0,82.0,0.004733727810650888,26.0,5.0,0.05233399079552926,0.0,0.0,0.0,0.0,0.0,0.0,22.0,45.0,0.059171597633136085,0.0,0.0,0.0,53.0,29.0,0.14385272846811303,0.0,44.0,0.0,25.0,22.0,0.06403681788297172,0.0,133.0,0.0,0.0,0.0,0.0,55.0,121.0,0.12741617357001972,16.0,75.0,0.03760683760683761,25.0,42.0,0.055424063116370804,121.0,266.0,0.28665351742274814,48.0,35.0,0.06015779092702169,0.0,0.0,0.0,7.0,42.0,0.017751479289940825,42.0,90.0,0.09086127547666008,"hsu, w"
+434,0.0,0.0,0.0,0.0,72.0,0.0,2.0,66.0,0.015238095238095238,0.0,0.0,0.0,0.0,0.0,0.0,2.0,20.0,0.022857142857142857,0.0,0.0,0.0,0.0,17.0,0.0,0.0,39.0,0.0,17.0,6.0,0.17142857142857143,2.0,24.0,0.011428571428571429,0.0,0.0,0.0,9.0,0.0,0.05142857142857143,29.0,27.0,0.23619047619047617,11.0,16.0,0.12571428571428572,6.0,31.0,0.06095238095238095,0.0,42.0,0.0,0.0,0.0,0.0,8.0,15.0,0.07238095238095238,31.0,11.0,0.23238095238095235,"ye, y"
+435,0.0,0.0,0.0,0.0,47.0,0.0,98.0,111.0,0.25451697127937334,0.0,0.0,0.0,0.0,0.0,0.0,7.0,22.0,0.017232375979112275,0.0,0.0,0.0,2.0,2.0,0.006266318537859009,0.0,75.0,0.0,10.0,36.0,0.02819843342036554,1.0,42.0,0.0031331592689295045,0.0,0.0,0.0,14.0,11.0,0.03133159268929504,95.0,35.0,0.15509138381201046,7.0,34.0,0.008772845953002612,28.0,208.0,0.08302872062663186,33.0,4.0,0.09420365535248043,0.0,0.0,0.0,117.0,38.0,0.31248041775456925,3.0,40.0,0.005744125326370758,"dras, m"
+436,0.0,0.0,0.0,7.0,18.0,0.013671753624346372,15.0,0.0,0.026189398176507662,0.0,0.0,0.0,0.0,0.0,0.0,14.0,58.0,0.03149386102751218,0.0,0.0,0.0,3.0,4.0,0.004638630693974662,0.0,0.0,0.0,102.0,59.0,0.19482248914693578,8.0,13.0,0.01503892898678101,0.0,0.0,0.0,17.0,2.0,0.032714553315400255,136.0,22.0,0.17694711517120762,34.0,49.0,0.06066174838646673,30.0,11.0,0.049360357241146645,16.0,39.0,0.037109045551797296,0.0,0.0,0.0,178.0,74.0,0.32707894993829956,11.0,7.0,0.030273168739624108,"dorr, b"
+437,0.0,0.0,0.0,0.0,53.0,0.0,0.0,15.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,21.0,0.0037629350893697076,0.0,0.0,0.0,1.0,1.0,0.0018814675446848538,1.0,68.0,0.0018814675446848538,16.0,80.0,0.042333019755409214,0.0,8.0,0.0,0.0,0.0,0.0,88.0,46.0,0.25305738476011286,1.0,75.0,0.0018814675446848538,6.0,23.0,0.01458137347130762,56.0,51.0,0.09275634995296331,17.0,64.0,0.0456255879586077,0.0,0.0,0.0,148.0,444.0,0.45597365945437446,48.0,30.0,0.08626528692380053,"carpuat, m"
+438,0.0,0.0,0.0,0.0,126.0,0.0,20.0,36.0,0.13473589973142344,0.0,0.0,0.0,0.0,0.0,0.0,4.0,83.0,0.03581020590868397,0.0,0.0,0.0,0.0,39.0,0.0,0.0,176.0,0.0,1.0,12.0,0.006714413607878245,0.0,75.0,0.0,0.0,0.0,0.0,3.0,54.0,0.01611459265890779,0.0,41.0,0.0,0.0,21.0,0.0,3.0,66.0,0.04028648164726947,28.0,86.0,0.2596239928379588,0.0,0.0,0.0,31.0,55.0,0.19113697403760072,37.0,73.0,0.3155774395702775,"nguyen, m"
+439,0.0,0.0,0.0,0.0,0.0,0.0,28.0,17.0,0.0885303096380288,0.0,0.0,0.0,0.0,0.0,0.0,18.0,8.0,0.052333187963366765,0.0,0.0,0.0,1.0,0.0,0.0039249890972525075,35.0,15.0,0.22197993894461404,13.0,42.0,0.0340165721761884,0.0,8.0,0.0,0.0,0.0,0.0,4.0,7.0,0.010466637592673354,57.0,12.0,0.2285215874400349,40.0,11.0,0.15045791539467945,5.0,30.0,0.01569995638901003,0.0,12.0,0.0,0.0,0.0,0.0,55.0,27.0,0.18229393807239422,3.0,12.0,0.011774967291757523,"hwa, r"
+440,0.0,0.0,0.0,0.0,42.0,0.0,12.0,9.0,0.033834586466165426,0.0,0.0,0.0,0.0,0.0,0.0,86.0,42.0,0.33928571428571436,0.0,0.0,0.0,12.0,12.0,0.04605263157894738,0.0,80.0,0.0,11.0,7.0,0.03947368421052633,0.0,303.0,0.0,0.0,0.0,0.0,29.0,38.0,0.09774436090225566,83.0,28.0,0.262218045112782,15.0,20.0,0.060150375939849635,6.0,6.0,0.021616541353383464,11.0,21.0,0.03853383458646618,0.0,0.0,0.0,10.0,18.0,0.030075187969924817,6.0,15.0,0.03101503759398497,"paris, c"
+441,0.0,0.0,0.0,0.0,116.0,0.0,0.0,13.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,77.0,0.039799863543324994,0.0,0.0,0.0,17.0,25.0,0.10148965203547874,0.0,125.0,0.0,5.0,8.0,0.039799863543324994,0.0,43.0,0.0,0.0,0.0,0.0,16.0,76.0,0.08238571753468275,0.0,37.0,0.0,1.0,32.0,0.004775983625199,26.0,72.0,0.1746645440072777,1.0,42.0,0.004775983625199,0.0,0.0,0.0,85.0,52.0,0.5355924493973163,3.0,33.0,0.016715942688196497,"hu, x"
+442,0.0,0.0,0.0,0.0,31.0,0.0,15.0,1.0,0.13222942164626827,0.0,0.0,0.0,0.0,0.0,0.0,0.0,31.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,28.0,0.0,0.0,18.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,57.0,0.0,6.0,15.0,0.03263738900887929,5.0,5.0,0.03959683225341972,25.0,19.0,0.1749460043196544,0.0,5.0,0.0,0.0,0.0,0.0,86.0,338.0,0.5572354211663066,4.0,41.0,0.06335493160547156,"khadivi, s"
+443,0.0,0.0,0.0,4.0,21.0,0.02260738507912585,5.0,185.0,0.01758352172820899,0.0,0.0,0.0,0.0,0.0,0.0,10.0,5.0,0.05023863350916855,0.0,0.0,0.0,0.0,1.0,0.0,0.0,5.0,0.0,19.0,21.0,0.13376036171816125,0.0,1.0,0.0,0.0,0.0,0.0,5.0,7.0,0.03767897513187641,3.0,10.0,0.0146948003014318,3.0,64.0,0.016955538809344386,14.0,20.0,0.09181110273800552,15.0,1.0,0.11303692539562923,0.0,0.0,0.0,65.0,197.0,0.47902537050992217,3.0,17.0,0.02260738507912585,"schwartz, l"
+444,0.0,0.0,0.0,8.0,6.0,0.06015037593984962,1.0,0.0,0.007518796992481203,0.0,0.0,0.0,0.0,0.0,0.0,6.0,7.0,0.03609022556390978,0.0,0.0,0.0,2.0,8.0,0.015037593984962405,2.0,0.0,0.012030075187969926,27.0,4.0,0.2827067669172932,0.0,0.0,0.0,0.0,0.0,0.0,18.0,3.0,0.13533834586466165,1.0,37.0,0.010025062656641603,3.0,9.0,0.022556390977443608,0.0,1.0,0.0,0.0,24.0,0.0,0.0,0.0,0.0,43.0,5.0,0.3779448621553885,6.0,7.0,0.0406015037593985,"soderland, s"
+445,0.0,0.0,0.0,0.0,3.0,0.0,4.0,17.0,0.019586240665932186,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,10.0,0.01468968049944914,0.0,32.0,0.0,0.0,0.0,0.0,6.0,32.0,0.025706940874035994,11.0,0.0,0.04113110539845759,11.0,4.0,0.037213857265271155,167.0,24.0,0.6238217652099401,30.0,0.0,0.10796915167095117,0.0,0.0,0.0,3.0,0.0,0.005508630187293427,29.0,0.0,0.12437262822866939,"chan, s"
+446,0.0,0.0,0.0,0.0,33.0,0.0,10.0,1.0,0.03609106052193226,0.0,0.0,0.0,0.0,0.0,0.0,4.0,44.0,0.007403294466037386,0.0,0.0,0.0,7.0,3.0,0.01943364797334814,0.0,42.0,0.0,7.0,0.0,0.03146400148065889,0.0,50.0,0.0,0.0,0.0,0.0,157.0,118.0,0.5596890616324264,0.0,54.0,0.0,14.0,1.0,0.06848047381084582,22.0,2.0,0.06274292059966685,3.0,5.0,0.009254118082546732,0.0,0.0,0.0,38.0,1.0,0.17212659633536925,6.0,12.0,0.03331482509716824,"nie, j"
+447,0.0,0.0,0.0,2.0,412.0,0.011773940345368916,0.0,46.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,276.0,0.0,0.0,0.0,0.0,0.0,163.0,0.0,0.0,203.0,0.0,3.0,47.0,0.018315018315018312,0.0,56.0,0.0,0.0,0.0,0.0,3.0,83.0,0.009419152276295134,2.0,128.0,0.010465724751439037,0.0,29.0,0.0,0.0,44.0,0.0,2.0,113.0,0.006279434850863423,0.0,0.0,0.0,102.0,164.0,0.7545787545787545,30.0,154.0,0.1891679748822606,"he, x"
+448,0.0,0.0,0.0,1.0,0.0,0.0026262036766851477,28.0,14.0,0.3151444412022177,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.017508024511234316,0.0,0.0,0.0,2.0,1.0,0.011380215932302306,0.0,18.0,0.0,0.0,18.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,3.0,0.03939305515027721,0.0,4.0,0.0,0.0,2.0,0.0,0.0,2.0,0.0,9.0,0.0,0.10942515319521448,0.0,0.0,0.0,75.0,86.0,0.4992704989786986,2.0,11.0,0.0052524073533702954,"cancedda, n"
+449,0.0,0.0,0.0,4.0,81.0,0.004729809625162587,43.0,91.0,0.1228568050135982,0.0,0.0,0.0,0.0,0.0,0.0,1.0,99.0,0.0017736786094359703,0.0,0.0,0.0,1.0,26.0,0.0023649048125812936,1.0,14.0,0.0035473572188719407,15.0,31.0,0.04138583422017264,0.0,7.0,0.0,0.0,0.0,0.0,12.0,41.0,0.03192621496984747,21.0,52.0,0.054392810689369755,5.0,62.0,0.015371881281778408,6.0,36.0,0.01891923850065035,8.0,36.0,0.01513539080052028,0.0,0.0,0.0,214.0,246.0,0.6255173229277522,21.0,81.0,0.06207875133025896,"quirk, c"
+450,0.0,0.0,0.0,3.0,77.0,0.021887159533073932,2.0,137.0,0.029182879377431907,0.0,0.0,0.0,0.0,0.0,0.0,4.0,76.0,0.0321011673151751,0.0,0.0,0.0,0.0,87.0,0.0,0.0,80.0,0.0,40.0,27.0,0.30642023346303504,7.0,106.0,0.06809338521400778,0.0,0.0,0.0,15.0,43.0,0.13229571984435795,9.0,79.0,0.08754863813229573,13.0,49.0,0.07587548638132297,12.0,148.0,0.08365758754863814,15.0,32.0,0.10943579766536965,0.0,0.0,0.0,1.0,41.0,0.014591439688715954,4.0,7.0,0.038910505836575876,"yang, c"
+451,0.0,0.0,0.0,0.0,28.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,180.0,0.017621145374449337,0.0,0.0,0.0,0.0,14.0,0.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,20.0,0.09581497797356828,0.0,4.0,0.0,0.0,13.0,0.0,19.0,33.0,0.22466960352422907,1.0,1.0,0.013215859030837005,0.0,0.0,0.0,86.0,34.0,0.6486784140969163,0.0,42.0,0.0,"lane, i"
+452,0.0,0.0,0.0,1.0,273.0,0.002027486929950326,39.0,193.0,0.08616819452288887,0.0,0.0,0.0,0.0,0.0,0.0,51.0,327.0,0.08251871804897828,0.0,0.0,0.0,17.0,97.0,0.024633966198896465,0.0,339.0,0.0,93.0,38.0,0.3144342587362964,0.0,147.0,0.0,0.0,0.0,0.0,40.0,89.0,0.09326439877771502,0.0,44.0,0.0,15.0,27.0,0.02280922796194117,32.0,60.0,0.05271466017870848,42.0,78.0,0.1694979073438473,0.0,0.0,0.0,79.0,365.0,0.12557385121142342,9.0,137.0,0.026357330089354248,"kim, y"
+453,0.0,0.0,0.0,3.0,45.0,0.0037496875260394974,137.0,32.0,0.23998000166652783,0.0,0.0,0.0,0.0,0.0,0.0,10.0,21.0,0.01916506957753521,0.0,0.0,0.0,1.0,129.0,0.0016665277893508877,0.0,50.0,0.0,23.0,15.0,0.04491292392300642,0.0,6.0,0.0,0.0,0.0,0.0,3.0,99.0,0.004499625031247397,8.0,39.0,0.01583201399883343,25.0,58.0,0.03874677110240814,42.0,28.0,0.06957753520539957,91.0,46.0,0.1799850012498959,0.0,0.0,0.0,85.0,127.0,0.15707024414632118,80.0,46.0,0.22481459878343474,"toutanova, k"
+454,0.0,0.0,0.0,12.0,11.0,0.022333891680625346,60.0,6.0,0.17308766052484645,0.0,0.0,0.0,0.0,0.0,0.0,5.0,2.0,0.02233389168062535,0.0,0.0,0.0,4.0,0.0,0.01340033500837521,0.0,0.0,0.0,42.0,22.0,0.12618648799553323,1.0,3.0,0.0033500837520938024,0.0,0.0,0.0,56.0,0.0,0.15633724176437747,9.0,11.0,0.024567280848687884,20.0,19.0,0.07314349525404802,31.0,55.0,0.07928531546621997,13.0,0.0,0.04131769960915689,0.0,0.0,0.0,69.0,48.0,0.18648799553322165,32.0,1.0,0.07816862088218872,"suzuki, h"
+455,0.0,0.0,0.0,0.0,14.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,12.0,42.0,0.04212454212454213,0.0,0.0,0.0,13.0,30.0,0.06153846153846154,0.0,0.0,0.0,24.0,0.0,0.1446886446886447,12.0,9.0,0.032967032967032975,0.0,0.0,0.0,61.0,90.0,0.2670329670329671,3.0,1.0,0.011355311355311357,9.0,0.0,0.0457875457875458,4.0,1.0,0.02197802197802198,54.0,9.0,0.2516483516483517,0.0,0.0,0.0,13.0,2.0,0.07142857142857144,11.0,21.0,0.04945054945054945,"pantel, p"
+456,0.0,0.0,0.0,0.0,57.0,0.0,86.0,10.0,0.5815258994641491,0.0,0.0,0.0,0.0,0.0,0.0,0.0,42.0,0.0,0.0,0.0,0.0,0.0,9.0,0.0,0.0,68.0,0.0,27.0,24.0,0.3036488900229651,0.0,78.0,0.0,0.0,0.0,0.0,8.0,41.0,0.06889512630773156,0.0,20.0,0.0,4.0,3.0,0.016841030875223272,2.0,1.0,0.006124011227353917,0.0,5.0,0.0,0.0,0.0,0.0,2.0,111.0,0.015310028068384792,1.0,37.0,0.007655014034192396,"yoshinaga, n"
+457,0.0,0.0,0.0,0.0,1.0,0.0,4.0,7.0,0.028114186851211073,0.0,0.0,0.0,0.0,0.0,0.0,3.0,127.0,0.03027681660899654,0.0,0.0,0.0,0.0,19.0,0.0,2.0,5.0,0.010380622837370243,25.0,32.0,0.2703287197231834,6.0,62.0,0.054498269896193774,0.0,0.0,0.0,39.0,10.0,0.3494809688581315,2.0,7.0,0.025951557093425608,1.0,8.0,0.012975778546712804,2.0,10.0,0.01730103806228374,1.0,0.0,0.00865051903114187,0.0,0.0,0.0,0.0,1.0,0.0,22.0,39.0,0.1920415224913495,"kaji, n"
+458,0.0,0.0,0.0,1.0,85.0,0.0037332005973120955,2.0,15.0,0.014932802389248382,0.0,0.0,0.0,0.0,0.0,0.0,8.0,45.0,0.048531607765057244,0.0,0.0,0.0,1.0,298.0,0.007466401194624191,0.0,93.0,0.0,0.0,14.0,0.0,0.0,19.0,0.0,0.0,0.0,0.0,11.0,125.0,0.1642608262817322,0.0,15.0,0.0,2.0,44.0,0.018666002986560477,1.0,2.0,0.004977600796416127,82.0,110.0,0.3330014932802389,0.0,0.0,0.0,14.0,15.0,0.06968641114982577,49.0,71.0,0.3347436535589845,"yih, w"
+459,0.0,0.0,0.0,2.0,542.0,0.012281926029309142,1.0,76.0,0.007676203768318213,0.0,0.0,0.0,0.0,0.0,0.0,6.0,277.0,0.09211444521981856,0.0,0.0,0.0,0.0,332.0,0.0,0.0,202.0,0.0,0.0,15.0,0.0,0.0,59.0,0.0,0.0,0.0,0.0,0.0,99.0,0.0,3.0,104.0,0.04605722260990928,10.0,87.0,0.15352407536636425,18.0,76.0,0.1842288904396371,0.0,152.0,0.0,0.0,0.0,0.0,65.0,364.0,0.25847871598046057,24.0,112.0,0.24563852058618282,"feng, y"
+460,0.0,0.0,0.0,5.0,63.0,0.026896692250220743,26.0,120.0,0.25148407253956395,0.0,0.0,0.0,0.0,0.0,0.0,0.0,41.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,4.0,0.0,0.0,4.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,53.0,0.0,0.0,2.0,0.0,0.0,3.0,0.0,9.0,1.0,0.08069007675066224,0.0,4.0,0.0,0.0,0.0,0.0,127.0,161.0,0.6140324662093325,3.0,13.0,0.026896692250220743,"mi, h"
+461,0.0,0.0,0.0,0.0,1.0,0.0,36.0,33.0,0.1530317613089509,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.011549566891241578,0.0,0.0,0.0,0.0,0.0,0.0,0.0,18.0,0.0,8.0,1.0,0.03464870067372473,0.0,63.0,0.0,0.0,0.0,0.0,11.0,28.0,0.055822906641000966,14.0,3.0,0.08084696823869104,1.0,1.0,0.005774783445620789,8.0,9.0,0.038498556304138586,94.0,25.0,0.408084696823869,0.0,0.0,0.0,32.0,2.0,0.10009624639076033,22.0,1.0,0.1116458132820019,"haghighi, a"
+462,0.0,0.0,0.0,4.0,147.0,0.0035396141820541564,299.0,317.0,0.45684620376378976,0.0,0.0,0.0,0.0,0.0,0.0,10.0,234.0,0.013096572473600378,0.0,0.0,0.0,0.0,29.0,0.0,11.0,88.0,0.015043360273730164,19.0,25.0,0.019821839419503277,0.0,9.0,0.0,0.0,0.0,0.0,29.0,56.0,0.04489410654238688,3.0,73.0,0.0035396141820541564,13.0,33.0,0.02743200991091971,32.0,114.0,0.04395020942717244,25.0,183.0,0.04365524157866793,0.0,0.0,0.0,115.0,115.0,0.15102353843431066,130.0,153.0,0.1771576898118105,"klein, d"
+463,0.0,0.0,0.0,0.0,162.0,0.0,0.0,19.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,355.0,0.0,0.0,0.0,0.0,0.0,414.0,0.0,0.0,217.0,0.0,14.0,73.0,0.10096153846153845,37.0,350.0,0.2848557692307692,0.0,0.0,0.0,4.0,37.0,0.05769230769230768,0.0,406.0,0.0,1.0,46.0,0.00721153846153846,0.0,55.0,0.0,28.0,137.0,0.2115384615384615,0.0,0.0,0.0,0.0,79.0,0.0,53.0,36.0,0.3377403846153846,"choi, y"
+464,0.0,0.0,0.0,0.0,133.0,0.0,32.0,89.0,0.18068910256410253,0.0,0.0,0.0,0.0,0.0,0.0,0.0,194.0,0.0,0.0,0.0,0.0,4.0,235.0,0.02604166666666666,0.0,271.0,0.0,3.0,32.0,0.014423076923076922,0.0,140.0,0.0,0.0,0.0,0.0,1.0,141.0,0.004006410256410255,4.0,129.0,0.01923076923076923,0.0,32.0,0.0,10.0,85.0,0.08012820512820511,2.0,113.0,0.00801282051282051,0.0,0.0,0.0,123.0,427.0,0.6434294871794871,3.0,63.0,0.024038461538461533,"zhang, d"
+465,0.0,0.0,0.0,4.0,113.0,0.0025369575584167857,49.0,141.0,0.07136862184072484,0.0,0.0,0.0,0.0,0.0,0.0,73.0,193.0,0.07058655221745351,0.0,0.0,0.0,1.0,21.0,0.0020028612303290413,2.0,82.0,0.0020028612303290413,59.0,81.0,0.07704339532665713,6.0,117.0,0.008011444921316165,0.0,0.0,0.0,32.0,110.0,0.034716261325703386,48.0,138.0,0.06949928469241774,32.0,66.0,0.036051502145922745,141.0,126.0,0.16044825941821647,258.0,206.0,0.3601144492131617,0.0,0.0,0.0,33.0,75.0,0.042594182164997614,48.0,87.0,0.06302336671435384,"jurafsky, d"
+466,0.0,0.0,0.0,5.0,13.0,0.01672137239115625,19.0,11.0,0.08670341239858796,0.0,0.0,0.0,0.0,0.0,0.0,150.0,159.0,0.5196630953118226,0.0,0.0,0.0,2.0,14.0,0.008670341239858796,0.0,4.0,0.0,1.0,8.0,0.004335170619929398,1.0,99.0,0.004335170619929398,0.0,0.0,0.0,5.0,6.0,0.02167585309964699,1.0,49.0,0.0026011023719576393,16.0,62.0,0.04186536198674676,13.0,19.0,0.055056666873103365,42.0,139.0,0.14690035300675047,0.0,0.0,0.0,10.0,5.0,0.05072149625317396,11.0,34.0,0.04075060382733635,"riccardi, g"
+467,0.0,0.0,0.0,2.0,67.0,0.01360914534567229,7.0,11.0,0.04899292324442025,0.0,0.0,0.0,0.0,0.0,0.0,7.0,55.0,0.03810560696788241,0.0,0.0,0.0,22.0,140.0,0.17419706042460534,0.0,26.0,0.0,5.0,1.0,0.04082743603701688,0.0,81.0,0.0,0.0,0.0,0.0,64.0,62.0,0.38704409363091996,7.0,19.0,0.03810560696788242,4.0,6.0,0.0195971692977681,26.0,38.0,0.1350027218290691,11.0,10.0,0.07675557974959171,0.0,0.0,0.0,1.0,43.0,0.008165487207403375,4.0,55.0,0.0195971692977681,"chua, t"
+468,0.0,0.0,0.0,0.0,49.0,0.0,43.0,42.0,0.2996323529411764,0.0,0.0,0.0,0.0,0.0,0.0,0.0,13.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,10.0,0.0,0.0,14.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,18.0,0.027573529411764705,0.0,17.0,0.0,0.0,8.0,0.0,0.0,15.0,0.0,0.0,8.0,0.0,0.0,0.0,0.0,60.0,150.0,0.4669117647058823,28.0,53.0,0.20588235294117646,"denero, j"
+469,0.0,0.0,0.0,9.0,21.0,0.007428807263722658,453.0,276.0,0.4302861466501582,0.0,0.0,0.0,0.0,0.0,0.0,26.0,11.0,0.028545879763378735,0.0,0.0,0.0,32.0,7.0,0.03095336359884441,22.0,1.0,0.023043059568028615,96.0,15.0,0.09956665290961618,17.0,7.0,0.01186545604622369,0.0,0.0,0.0,114.0,27.0,0.12518915944421516,22.0,10.0,0.01864080341174852,89.0,51.0,0.09688402806438301,5.0,40.0,0.004299078277617279,58.0,39.0,0.053858852661989275,0.0,0.0,0.0,18.0,5.0,0.016508460586050353,57.0,1.0,0.05293025175402394,"curran, j"
+470,0.0,0.0,0.0,0.0,31.0,0.0,28.0,32.0,0.14196516595465,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,1.0,2.0,0.004929346040092014,0.0,3.0,0.0,0.0,15.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,4.0,0.0,0.0,4.0,0.0,0.0,40.0,0.0,113.0,312.0,0.7742359513637856,0.0,0.0,0.0,0.0,2.0,0.0,14.0,14.0,0.07886953664147223,"kong, f"
+471,0.0,0.0,0.0,2.0,29.0,0.003643319063667,79.0,25.0,0.18125512341743324,0.0,0.0,0.0,0.0,0.0,0.0,150.0,133.0,0.2536660898078149,0.0,0.0,0.0,29.0,69.0,0.048729392476546125,2.0,113.0,0.003643319063667,17.0,35.0,0.029146552509336007,4.0,5.0,0.007286638127334,0.0,0.0,0.0,22.0,15.0,0.0382548501685035,16.0,31.0,0.027871390837052555,14.0,21.0,0.029146552509336,78.0,77.0,0.16121686856726475,8.0,1.0,0.014573276254668,0.0,0.0,0.0,39.0,31.0,0.08516258311321613,74.0,27.0,0.11640404408416066,"lee, g"
+472,0.0,0.0,0.0,0.0,11.0,0.0,138.0,38.0,0.35691648822269806,0.0,0.0,0.0,0.0,0.0,0.0,2.0,39.0,0.010278372591006424,0.0,0.0,0.0,7.0,28.0,0.023982869379014986,0.0,13.0,0.0,50.0,38.0,0.11203426124197002,0.0,4.0,0.0,0.0,0.0,0.0,7.0,10.0,0.02312633832976445,38.0,46.0,0.35802997858672375,4.0,39.0,0.007537473233404711,6.0,168.0,0.0462526766595289,10.0,7.0,0.021927194860813702,0.0,0.0,0.0,8.0,17.0,0.028265524625267664,5.0,7.0,0.01164882226980728,"cahill, a"
+473,0.0,0.0,0.0,10.0,378.0,0.006199021207177815,349.0,380.0,0.28779771615008165,0.0,0.0,0.0,0.0,0.0,0.0,58.0,371.0,0.047634584013050575,0.0,0.0,0.0,12.0,283.0,0.007765089722675368,6.0,142.0,0.004567699836867863,74.0,91.0,0.0533768352365416,2.0,49.0,0.0009787928221859708,0.0,0.0,0.0,82.0,114.0,0.04691680261011419,19.0,172.0,0.013115823817292009,46.0,134.0,0.02597063621533442,135.0,63.0,0.08734094616639479,279.0,198.0,0.20835236541598695,0.0,0.0,0.0,120.0,305.0,0.08469820554649267,169.0,194.0,0.12528548123980424,"manning, c"
+474,0.0,0.0,0.0,0.0,51.0,0.0,0.0,11.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,16.0,0.06293706293706294,0.0,0.0,0.0,0.0,25.0,0.0,0.0,257.0,0.0,0.0,23.0,0.0,2.0,90.0,0.013986013986013986,0.0,0.0,0.0,95.0,56.0,0.6643356643356644,13.0,10.0,0.09090909090909091,4.0,19.0,0.027972027972027972,0.0,3.0,0.0,18.0,76.0,0.1258741258741259,0.0,0.0,0.0,0.0,5.0,0.0,2.0,7.0,0.013986013986013986,"bollegala, d"
+475,0.0,0.0,0.0,3.0,0.0,0.009390640317837057,92.0,6.0,0.30963314586450646,0.0,0.0,0.0,0.0,0.0,0.0,93.0,17.0,0.3367731283215521,0.0,0.0,0.0,17.0,0.0,0.059646045095712294,0.0,3.0,0.0,0.0,0.0,0.0,1.0,27.0,0.003508590887983076,0.0,0.0,0.0,0.0,26.0,0.0,6.0,45.0,0.024560136215881535,4.0,2.0,0.01929724988390692,16.0,4.0,0.0731644393994118,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.00877147721995769,49.0,9.0,0.15525514679325114,"weng, f"
+476,0.0,0.0,0.0,0.0,30.0,0.0,13.0,35.0,0.06303030303030302,0.0,0.0,0.0,0.0,0.0,0.0,9.0,9.0,0.03272727272727273,0.0,0.0,0.0,4.0,31.0,0.02303030303030303,0.0,15.0,0.0,11.0,7.0,0.05818181818181818,19.0,63.0,0.13818181818181818,0.0,0.0,0.0,21.0,14.0,0.11272727272727273,13.0,1.0,0.04727272727272727,4.0,3.0,0.025454545454545455,44.0,9.0,0.1806060606060606,15.0,9.0,0.10909090909090909,0.0,0.0,0.0,16.0,6.0,0.09333333333333332,18.0,12.0,0.11636363636363636,"ren, f"
+477,0.0,0.0,0.0,0.0,20.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,14.0,65.0,0.09745939192003332,0.0,0.0,0.0,43.0,126.0,0.3448563098708871,0.0,24.0,0.0,0.0,3.0,0.0,0.0,25.0,0.0,0.0,0.0,0.0,13.0,21.0,0.06197417742607247,38.0,107.0,0.2803831736776343,13.0,27.0,0.08563098708871304,0.0,5.0,0.0,17.0,67.0,0.08096626405664308,0.0,0.0,0.0,3.0,5.0,0.022490628904623073,7.0,5.0,0.026239067055393587,"demner-fushman, d"
+478,0.0,0.0,0.0,0.0,102.0,0.0,0.0,37.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,35.0,166.0,0.32835820895522394,0.0,0.0,0.0,0.0,13.0,0.0,2.0,72.0,0.01791044776119403,1.0,9.0,0.008955223880597015,2.0,59.0,0.011940298507462687,0.0,0.0,0.0,5.0,74.0,0.029850746268656716,41.0,212.0,0.3164179104477612,2.0,32.0,0.011940298507462687,1.0,17.0,0.008955223880597015,21.0,180.0,0.24179104477611943,0.0,0.0,0.0,0.0,0.0,0.0,2.0,59.0,0.023880597014925373,"carenini, g"
+479,0.0,0.0,0.0,5.0,0.0,0.059160989602007885,2.0,64.0,0.023664395840803154,0.0,0.0,0.0,0.0,0.0,0.0,2.0,32.0,0.01577626389386877,0.0,0.0,0.0,1.0,4.0,0.011832197920401577,0.0,9.0,0.0,6.0,36.0,0.07099318752240946,8.0,24.0,0.06310505557547508,0.0,0.0,0.0,4.0,10.0,0.04732879168160631,45.0,67.0,0.37683757619218355,19.0,13.0,0.137325206167085,4.0,3.0,0.043384725708139116,7.0,10.0,0.06704912154894227,0.0,0.0,0.0,5.0,3.0,0.020437432771602725,8.0,1.0,0.06310505557547508,"siddharthan, a"
+480,0.0,0.0,0.0,23.0,358.0,0.1597222222222222,53.0,98.0,0.2986111111111111,0.0,0.0,0.0,0.0,0.0,0.0,4.0,56.0,0.027777777777777776,0.0,0.0,0.0,0.0,47.0,0.0,0.0,182.0,0.0,0.0,59.0,0.0,17.0,45.0,0.11805555555555555,0.0,0.0,0.0,0.0,46.0,0.0,2.0,94.0,0.013888888888888888,0.0,27.0,0.0,0.0,26.0,0.0,14.0,397.0,0.04861111111111111,0.0,0.0,0.0,3.0,218.0,0.020833333333333332,51.0,132.0,0.3125,"titov, i"
+481,0.0,0.0,0.0,1.0,200.0,0.008412001121600149,2.0,27.0,0.011216001495466865,0.0,0.0,0.0,0.0,0.0,0.0,69.0,109.0,0.26002430133657356,0.0,0.0,0.0,0.0,116.0,0.0,0.0,134.0,0.0,0.0,22.0,0.0,0.0,140.0,0.0,0.0,0.0,0.0,14.0,58.0,0.09253201233760164,0.0,37.0,0.0,13.0,33.0,0.04963080661744088,52.0,108.0,0.31965604262080566,1.0,62.0,0.008412001121600149,0.0,0.0,0.0,30.0,56.0,0.16599682213290962,10.0,45.0,0.08412001121600149,"glass, j"
+482,0.0,0.0,0.0,2.0,377.0,0.0021067415730337082,10.0,22.0,0.030898876404494388,0.0,0.0,0.0,0.0,0.0,0.0,22.0,148.0,0.06882022471910113,0.0,0.0,0.0,75.0,203.0,0.16924157303370788,0.0,259.0,0.0,58.0,153.0,0.15063202247191013,1.0,113.0,0.0014044943820224719,0.0,0.0,0.0,77.0,205.0,0.1892556179775281,45.0,102.0,0.09564606741573034,31.0,109.0,0.09557584269662922,14.0,110.0,0.029494382022471913,36.0,68.0,0.11587078651685394,0.0,0.0,0.0,26.0,140.0,0.031741573033707865,12.0,45.0,0.019311797752808987,"lin, j"
+483,0.0,0.0,0.0,3.0,59.0,0.014705882352941175,1.0,12.0,0.004010695187165775,0.0,0.0,0.0,0.0,0.0,0.0,9.0,232.0,0.044117647058823525,0.0,0.0,0.0,12.0,30.0,0.0641711229946524,0.0,3.0,0.0,18.0,11.0,0.07219251336898395,16.0,253.0,0.07352941176470588,0.0,0.0,0.0,40.0,118.0,0.20454545454545453,6.0,26.0,0.0320855614973262,20.0,15.0,0.11631016042780747,41.0,74.0,0.2205882352941176,13.0,61.0,0.05213903743315508,0.0,0.0,0.0,15.0,19.0,0.06016042780748663,8.0,78.0,0.04144385026737967,"wong, k"
+484,0.0,0.0,0.0,0.0,77.0,0.0,23.0,21.0,0.12061990102982478,0.0,0.0,0.0,0.0,0.0,0.0,4.0,97.0,0.010431991440417279,0.0,0.0,0.0,0.0,68.0,0.0,0.0,185.0,0.0,0.0,16.0,0.0,0.0,150.0,0.0,0.0,0.0,0.0,2.0,96.0,0.0032599973251303998,0.0,55.0,0.0,0.0,46.0,0.0,5.0,122.0,0.019016651063260663,26.0,19.0,0.06138825732245553,0.0,0.0,0.0,166.0,135.0,0.7526832285676073,5.0,53.0,0.032599973251303996,"kumar, s"
+485,0.0,0.0,0.0,1.0,0.0,0.0015918747911757656,15.0,19.0,0.017908591400727366,0.0,0.0,0.0,0.0,0.0,0.0,34.0,0.0,0.0529298368065942,0.0,0.0,0.0,9.0,0.0,0.01392890442278795,2.0,0.0,0.001910249749410919,2.0,13.0,0.00278578088455759,0.0,0.0,0.0,0.0,0.0,0.0,14.0,3.0,0.0230821844720486,54.0,5.0,0.1252464344628647,10.0,1.0,0.02005762236881465,21.0,1.0,0.03900093238380626,4.0,0.0,0.0014694228841622453,0.0,0.0,0.0,346.0,57.0,0.5934325543642733,55.0,8.0,0.1066556110087763,"och, f"
+486,0.0,0.0,0.0,18.0,14.0,0.10865191146881288,17.0,5.0,0.09507042253521127,0.0,0.0,0.0,0.0,0.0,0.0,73.0,97.0,0.4344064386317908,0.0,0.0,0.0,0.0,48.0,0.0,0.0,3.0,0.0,3.0,93.0,0.019617706237424547,0.0,3.0,0.0,0.0,0.0,0.0,1.0,19.0,0.006036217303822937,3.0,39.0,0.018712273641851105,15.0,24.0,0.0977867203219316,3.0,1.0,0.018108651911468814,29.0,149.0,0.16841046277665994,0.0,0.0,0.0,3.0,4.0,0.028672032193158954,1.0,10.0,0.004527162977867203,"allen, j"
+487,0.0,0.0,0.0,0.0,150.0,0.0,165.0,136.0,0.607539958304378,0.0,0.0,0.0,0.0,0.0,0.0,9.0,26.0,0.02397498262682418,0.0,0.0,0.0,1.0,4.0,0.005211952744961779,0.0,8.0,0.0,2.0,16.0,0.003648366921473245,1.0,1.0,0.005211952744961779,0.0,0.0,0.0,7.0,24.0,0.012161223071577482,6.0,2.0,0.009120917303683113,33.0,28.0,0.05046907574704656,4.0,63.0,0.009120917303683113,0.0,5.0,0.0,0.0,0.0,0.0,92.0,252.0,0.25964211257817926,8.0,81.0,0.013898540653231409,"chiang, d"
+488,0.0,0.0,0.0,0.0,109.0,0.0,163.0,120.0,0.22649048963716767,0.0,0.0,0.0,0.0,0.0,0.0,29.0,70.0,0.03282007565666791,0.0,0.0,0.0,5.0,53.0,0.011188662155682242,0.0,44.0,0.0,37.0,63.0,0.07863143126076687,0.0,34.0,0.0,0.0,0.0,0.0,3.0,48.0,0.0040403502228852535,7.0,74.0,0.011410659420675937,22.0,73.0,0.028282451560196778,17.0,96.0,0.030147228586143812,6.0,117.0,0.005905127248832294,0.0,0.0,0.0,372.0,304.0,0.5316124105351021,32.0,53.0,0.03947111371587902,"knight, k"
+489,0.0,0.0,0.0,4.0,105.0,0.00941586748038361,8.0,16.0,0.020924149956408022,0.0,0.0,0.0,0.0,0.0,0.0,72.0,100.0,0.3114210985178727,0.0,0.0,0.0,1.0,31.0,0.0034873583260680036,20.0,24.0,0.06974716652136007,4.0,18.0,0.016041848299912814,3.0,81.0,0.010462074978204011,0.0,0.0,0.0,5.0,56.0,0.020924149956408022,5.0,6.0,0.022667829119442023,7.0,4.0,0.03138622493461203,115.0,93.0,0.4047079337401918,8.0,9.0,0.02789886660854403,0.0,0.0,0.0,8.0,6.0,0.03696599825632084,4.0,34.0,0.013949433304272014,"ostendorf, m"
+490,0.0,0.0,0.0,0.0,445.0,0.0,206.0,366.0,0.35675269698211104,0.0,0.0,0.0,0.0,0.0,0.0,11.0,164.0,0.018435067595247847,0.0,0.0,0.0,12.0,118.0,0.01720606308889799,17.0,388.0,0.03072511265874641,6.0,83.0,0.013655605626109514,30.0,222.0,0.03475351631844872,0.0,0.0,0.0,31.0,86.0,0.06827802813054758,8.0,275.0,0.013314215485456778,16.0,166.0,0.03345623378396831,4.0,70.0,0.006145022531749282,1.0,243.0,0.0020483408439164272,0.0,0.0,0.0,70.0,118.0,0.1136829168373617,163.0,326.0,0.29154718011743813,"smith, n"
+491,0.0,0.0,0.0,0.0,5.0,0.0,20.0,0.0,0.09652509652509653,0.0,0.0,0.0,0.0,0.0,0.0,24.0,47.0,0.1003861003861004,0.0,0.0,0.0,23.0,21.0,0.11583011583011583,22.0,57.0,0.15926640926640928,13.0,1.0,0.05067567567567568,3.0,31.0,0.02171814671814672,0.0,0.0,0.0,31.0,12.0,0.17422779922779924,12.0,8.0,0.06515444015444016,0.0,2.0,0.0,16.0,31.0,0.06370656370656372,6.0,3.0,0.02895752895752896,0.0,0.0,0.0,3.0,7.0,0.011341698841698842,19.0,5.0,0.11221042471042472,"seo, j"
+492,0.0,0.0,0.0,0.0,1.0,0.0,64.0,84.0,0.4075049374588545,0.0,0.0,0.0,0.0,0.0,0.0,4.0,16.0,0.026333113890717578,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,15.0,13.0,0.06287030941408822,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0016458196181698486,0.0,2.0,0.0,53.0,31.0,0.3778801843317972,5.0,32.0,0.019749835418038184,10.0,95.0,0.050362080315997364,0.0,0.0,0.0,16.0,1.0,0.05365371955233707,0.0,0.0,0.0,"flickinger, d"
+493,0.0,0.0,0.0,0.0,3.0,0.0,8.0,1.0,0.05194805194805195,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,11.0,0.0,0.0,0.0,0.0,8.0,43.0,0.05194805194805195,0.0,0.0,0.0,0.0,0.0,0.0,107.0,18.0,0.6948051948051948,11.0,8.0,0.07142857142857142,0.0,2.0,0.0,1.0,4.0,0.006493506493506494,0.0,3.0,0.0,0.0,0.0,0.0,18.0,136.0,0.11688311688311688,1.0,0.0,0.006493506493506494,"rapp, r"
+494,0.0,0.0,0.0,0.0,10.0,0.0,83.0,43.0,0.27689130842875914,0.0,0.0,0.0,0.0,0.0,0.0,9.0,10.0,0.027032100619485643,0.0,0.0,0.0,4.0,1.0,0.02064952130655153,0.0,17.0,0.0,10.0,16.0,0.045053501032476066,0.0,37.0,0.0,0.0,0.0,0.0,96.0,52.0,0.4862023653088042,4.0,2.0,0.012389712783930919,3.0,14.0,0.01351605030974282,0.0,1.0,0.0,17.0,76.0,0.09010700206495213,0.0,0.0,0.0,6.0,0.0,0.028158438145297542,0.0,0.0,0.0,"weir, d"
+495,0.0,0.0,0.0,2.0,0.0,0.022065313327449248,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,8.0,0.04766107678729038,0.0,0.0,0.0,1.0,1.0,0.00529567519858782,0.0,4.0,0.0,7.0,21.0,0.054721977052074135,59.0,91.0,0.5684024713150927,0.0,0.0,0.0,11.0,0.0,0.07546337157987644,1.0,2.0,0.00529567519858782,10.0,5.0,0.1085613415710503,4.0,2.0,0.0353045013239188,6.0,0.0,0.0706090026478376,0.0,0.0,0.0,1.0,0.0,0.006619593998234775,0.0,8.0,0.0,"wilson, t"
+496,0.0,0.0,0.0,0.0,125.0,0.0,152.0,22.0,0.749539849070495,0.0,0.0,0.0,0.0,0.0,0.0,5.0,44.0,0.010353395913859744,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,4.0,21.0,0.017393705135284373,0.0,0.0,0.0,0.0,0.0,0.0,19.0,4.0,0.06603165838394992,4.0,12.0,0.020936867292471927,7.0,5.0,0.02438799926375851,0.0,21.0,0.0,20.0,10.0,0.09203018590097549,0.0,0.0,0.0,0.0,120.0,0.0,4.0,9.0,0.019326339039204856,"ninomiya, t"
+497,0.0,0.0,0.0,9.0,190.0,0.03478664192949907,0.0,64.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,36.0,0.0,0.0,0.0,0.0,0.0,41.0,0.0,0.0,138.0,0.0,1.0,26.0,0.004638218923933209,2.0,13.0,0.0055658627087198514,0.0,0.0,0.0,6.0,103.0,0.027829313543599254,106.0,44.0,0.35180890538033394,3.0,62.0,0.007421150278293135,47.0,20.0,0.14656771799628943,1.0,25.0,0.004638218923933209,0.0,0.0,0.0,108.0,107.0,0.4139610389610389,1.0,21.0,0.0027829313543599257,"schwartz, r"
+498,0.0,0.0,0.0,13.0,2.0,0.026213452398098802,88.0,0.0,0.2833275035493097,0.0,0.0,0.0,0.0,0.0,0.0,17.0,11.0,0.029217505812637594,0.0,0.0,0.0,23.0,4.0,0.04067817534618629,0.0,0.0,0.0,31.0,4.0,0.08425752556531758,3.0,29.0,0.008641797493878726,0.0,0.0,0.0,54.0,13.0,0.1368284603197465,1.0,0.0,0.002880599164626242,16.0,1.0,0.03364128310117075,13.0,27.0,0.018723894570070572,57.0,6.0,0.14835085697825146,0.0,0.0,0.0,29.0,95.0,0.07849632723606509,42.0,0.0,0.10874261846464063,"rim, h"
+499,0.0,0.0,0.0,0.0,44.0,0.0,131.0,87.0,0.41450868477609837,0.0,0.0,0.0,0.0,0.0,0.0,24.0,27.0,0.08244371630905824,0.0,0.0,0.0,16.0,43.0,0.042366909769932695,8.0,7.0,0.02061092907726456,1.0,45.0,0.002290103230807173,2.0,34.0,0.005725258077017932,0.0,0.0,0.0,18.0,110.0,0.0343515484621076,1.0,5.0,0.0034351548462107596,10.0,35.0,0.030229362646654687,6.0,40.0,0.0171757742310538,4.0,17.0,0.002113941443822006,0.0,0.0,0.0,54.0,19.0,0.1535249973575732,57.0,36.0,0.19122361977239896,"smith, d"
+500,0.0,0.0,0.0,0.0,0.0,0.0,32.0,0.0,0.058741994073224356,0.0,0.0,0.0,0.0,0.0,0.0,7.0,9.0,0.023898288882516012,0.0,0.0,0.0,1.0,26.0,0.002867794665901921,0.0,6.0,0.0,172.0,18.0,0.5531975910524806,1.0,18.0,0.008603383997705764,0.0,0.0,0.0,18.0,23.0,0.09148264984227131,6.0,1.0,0.025810151993117293,7.0,2.0,0.020074562661313446,3.0,0.0,0.0071694866647548025,53.0,15.0,0.1341649937864449,0.0,0.0,0.0,19.0,1.0,0.07398910238026957,0.0,10.0,0.0,"fujita, s"
+501,0.0,0.0,0.0,8.0,142.0,0.017309404226791662,71.0,52.0,0.12401226494928069,0.0,0.0,0.0,0.0,0.0,0.0,31.0,277.0,0.07462276488883517,0.0,0.0,0.0,25.0,421.0,0.056159400380257386,3.0,346.0,0.004615841127144443,26.0,112.0,0.04170742161312657,10.0,37.0,0.015386137090481476,0.0,0.0,0.0,31.0,182.0,0.05923662779835368,7.0,232.0,0.01230890967238518,26.0,282.0,0.05769801408930554,18.0,304.0,0.022199997801980415,169.0,561.0,0.23363849171896126,0.0,0.0,0.0,38.0,50.0,0.07800771504874109,104.0,266.0,0.2030970095943555,"roth, d"
+502,0.0,0.0,0.0,0.0,42.0,0.0,157.0,445.0,0.6152486448267735,0.0,0.0,0.0,0.0,0.0,0.0,14.0,82.0,0.07689135045958048,0.0,0.0,0.0,6.0,44.0,0.021211407023332546,4.0,111.0,0.0047136460051850106,24.0,68.0,0.12550082488805092,0.0,0.0,0.0,0.0,0.0,0.0,0.0,28.0,0.0,1.0,5.0,0.0011784115012962526,14.0,38.0,0.04931652132924817,0.0,67.0,0.0,30.0,345.0,0.08944143294838558,0.0,0.0,0.0,6.0,18.0,0.0111949092623144,1.0,23.0,0.0053028517558331364,"steedman, m"
+503,0.0,0.0,0.0,1.0,116.0,0.005577689243027889,14.0,11.0,0.07808764940239044,0.0,0.0,0.0,0.0,0.0,0.0,34.0,54.0,0.22629482071713147,0.0,0.0,0.0,4.0,63.0,0.022310756972111555,0.0,104.0,0.0,10.0,14.0,0.05577689243027889,11.0,184.0,0.054183266932270914,0.0,0.0,0.0,1.0,63.0,0.005577689243027889,0.0,74.0,0.0,10.0,74.0,0.05577689243027889,0.0,22.0,0.0,53.0,25.0,0.4964143426294821,0.0,0.0,0.0,0.0,12.0,0.0,0.0,34.0,0.0,"lee, m"
+504,0.0,0.0,0.0,2.0,10.0,0.006582556226001096,64.0,124.0,0.19989029072956663,0.0,0.0,0.0,0.0,0.0,0.0,0.0,11.0,0.0,0.0,0.0,0.0,5.0,4.0,0.01645639056500274,15.0,7.0,0.04936917169500822,90.0,66.0,0.2622051563357103,2.0,52.0,0.006582556226001096,0.0,0.0,0.0,35.0,59.0,0.11300054854635216,21.0,49.0,0.06582556226001096,17.0,66.0,0.04333516182117388,23.0,7.0,0.07460230389467909,42.0,135.0,0.08217224355458035,0.0,0.0,0.0,15.0,51.0,0.045419637959407566,17.0,15.0,0.034558420186505755,"rappoport, a"
+505,0.0,0.0,0.0,0.0,5.0,0.0,1.0,1.0,0.003925417075564279,0.0,0.0,0.0,0.0,0.0,0.0,2.0,29.0,0.007850834151128559,0.0,0.0,0.0,29.0,26.0,0.10794896957801767,0.0,1.0,0.0,51.0,41.0,0.27085377821393525,14.0,201.0,0.04416094210009814,0.0,0.0,0.0,22.0,21.0,0.12561334641805694,7.0,21.0,0.03532875368007851,5.0,18.0,0.015701668302257117,1.0,9.0,0.0029440628066732095,68.0,101.0,0.3336604514229637,0.0,0.0,0.0,1.0,1.0,0.003925417075564279,15.0,10.0,0.04808635917566242,"riloff, e"
+506,0.0,0.0,0.0,0.0,75.0,0.0,60.0,173.0,0.34405594405594403,0.0,0.0,0.0,0.0,0.0,0.0,0.0,60.0,0.0,0.0,0.0,0.0,2.0,25.0,0.011988011988011988,15.0,238.0,0.0899100899100899,27.0,90.0,0.10989010989010987,0.0,24.0,0.0,0.0,0.0,0.0,9.0,164.0,0.053946053946053944,11.0,29.0,0.05994005994005994,15.0,113.0,0.06693306693306694,2.0,44.0,0.009990009990009988,37.0,59.0,0.11968031968031968,0.0,0.0,0.0,14.0,50.0,0.07672327672327672,16.0,90.0,0.05694305694305694,"reichart, r"
+507,0.0,0.0,0.0,3.0,25.0,0.033860045146726865,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,22.0,0.09029345372460497,0.0,0.0,0.0,0.0,13.0,0.0,0.0,11.0,0.0,0.0,2.0,0.0,0.0,33.0,0.0,0.0,0.0,0.0,39.0,73.0,0.3724604966139955,3.0,22.0,0.033860045146726865,5.0,13.0,0.05643340857787811,10.0,7.0,0.048156508653122654,1.0,13.0,0.011286681715575621,0.0,0.0,0.0,31.0,29.0,0.29721595184349137,5.0,22.0,0.05643340857787811,"zhai, c"
+508,0.0,0.0,0.0,6.0,81.0,0.05321507760532151,0.0,24.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,48.0,0.04434589800443459,0.0,0.0,0.0,2.0,11.0,0.017738359201773836,0.0,20.0,0.0,13.0,5.0,0.09534368070953438,2.0,38.0,0.017738359201773836,0.0,0.0,0.0,29.0,44.0,0.221729490022173,6.0,22.0,0.04212860310421286,24.0,14.0,0.17073170731707318,5.0,34.0,0.04212860310421286,28.0,16.0,0.24390243902439027,0.0,0.0,0.0,1.0,10.0,0.008869179600886918,5.0,4.0,0.04212860310421286,"ng, a"
+509,0.0,0.0,0.0,3.0,4.0,0.031746031746031744,1.0,15.0,0.010582010582010581,0.0,0.0,0.0,0.0,0.0,0.0,24.0,53.0,0.19047619047619047,0.0,0.0,0.0,4.0,9.0,0.042328042328042326,0.0,1.0,0.0,14.0,0.0,0.14814814814814814,0.0,0.0,0.0,0.0,0.0,0.0,7.0,6.0,0.07407407407407407,40.0,12.0,0.3439153439153439,7.0,4.0,0.07407407407407407,0.0,14.0,0.0,8.0,2.0,0.042328042328042326,0.0,0.0,0.0,5.0,2.0,0.042328042328042326,0.0,4.0,0.0,"sripada, s"
+510,0.0,0.0,0.0,0.0,114.0,0.0,64.0,113.0,0.459387483355526,0.0,0.0,0.0,0.0,0.0,0.0,15.0,134.0,0.053262316910785625,0.0,0.0,0.0,0.0,202.0,0.0,1.0,188.0,0.0039946737683089215,29.0,39.0,0.09720372836218377,19.0,254.0,0.07589880159786952,0.0,0.0,0.0,17.0,89.0,0.04793608521970706,11.0,69.0,0.04394141145139814,6.0,95.0,0.03595206391478029,36.0,79.0,0.0985352862849534,17.0,101.0,0.06790945406125166,0.0,0.0,0.0,3.0,87.0,0.015978695073235686,0.0,48.0,0.0,"park, j"
+511,0.0,0.0,0.0,0.0,105.0,0.0,2.0,41.0,0.015873015873015872,0.0,0.0,0.0,0.0,0.0,0.0,2.0,97.0,0.015873015873015872,0.0,0.0,0.0,0.0,130.0,0.0,0.0,85.0,0.0,3.0,9.0,0.02380952380952381,0.0,38.0,0.0,0.0,0.0,0.0,0.0,21.0,0.0,0.0,5.0,0.0,8.0,7.0,0.06349206349206349,67.0,21.0,0.5317460317460317,6.0,196.0,0.04761904761904762,0.0,0.0,0.0,0.0,60.0,0.0,38.0,51.0,0.3015873015873015,"he, s"
+512,0.0,0.0,0.0,1.0,0.0,0.014457831325301203,20.0,10.0,0.19277108433734938,0.0,0.0,0.0,0.0,0.0,0.0,0.0,18.0,0.0,0.0,0.0,0.0,5.0,8.0,0.043373493975903614,0.0,0.0,0.0,32.0,16.0,0.23132530120481926,0.0,6.0,0.0,0.0,0.0,0.0,21.0,0.0,0.15180722891566265,3.0,1.0,0.024096385542168672,4.0,0.0,0.028915662650602407,12.0,114.0,0.13012048192771083,18.0,0.0,0.14457831325301204,0.0,0.0,0.0,1.0,0.0,0.007228915662650602,4.0,0.0,0.03132530120481927,"tseng, c"
+513,0.0,0.0,0.0,0.0,30.0,0.0,19.0,1.0,0.15281501340482576,0.0,0.0,0.0,0.0,0.0,0.0,8.0,3.0,0.064343163538874,0.0,0.0,0.0,4.0,31.0,0.026809651474530828,0.0,4.0,0.0,4.0,1.0,0.032171581769437,29.0,1.0,0.2037533512064343,0.0,0.0,0.0,8.0,27.0,0.064343163538874,3.0,4.0,0.021447721179624665,5.0,18.0,0.040214477211796246,0.0,0.0,0.0,4.0,12.0,0.032171581769437,0.0,0.0,0.0,1.0,0.0,0.00804289544235925,44.0,1.0,0.353887399463807,"dasgupta, s"
+514,0.0,0.0,0.0,3.0,4.0,0.007800650054171183,9.0,2.0,0.05417118093174432,0.0,0.0,0.0,0.0,0.0,0.0,2.0,10.0,0.013001083423618637,0.0,0.0,0.0,0.0,48.0,0.0,9.0,0.0,0.058504875406283866,11.0,8.0,0.06283856988082341,0.0,42.0,0.0,0.0,0.0,0.0,20.0,69.0,0.0790899241603467,3.0,1.0,0.00801733477789816,10.0,1.0,0.04279523293607801,64.0,75.0,0.3081256771397617,61.0,1.0,0.30065005417118096,0.0,0.0,0.0,3.0,47.0,0.010834236186348862,13.0,49.0,0.05417118093174431,"tsai, r"
+515,0.0,0.0,0.0,1.0,36.0,0.009933774834437087,16.0,15.0,0.1357615894039735,0.0,0.0,0.0,0.0,0.0,0.0,4.0,125.0,0.02980132450331126,0.0,0.0,0.0,11.0,38.0,0.10927152317880796,0.0,24.0,0.0,2.0,10.0,0.019867549668874173,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.019867549668874173,38.0,78.0,0.3774834437086093,4.0,18.0,0.03642384105960265,0.0,14.0,0.0,13.0,5.0,0.12913907284768214,0.0,0.0,0.0,5.0,68.0,0.04304635761589404,9.0,7.0,0.08940397350993379,"soricut, r"
+516,0.0,0.0,0.0,0.0,6.0,0.0,23.0,0.0,0.16075062174994348,0.0,0.0,0.0,0.0,0.0,0.0,0.0,16.0,0.0,0.0,0.0,0.0,2.0,0.0,0.006782726656115758,0.0,0.0,0.0,6.0,1.0,0.022382997965182002,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,76.0,111.0,0.5640967669002939,2.0,0.0,0.016956816640289397,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,66.0,0.0,0.22903007008817544,0.0,0.0,0.0,"owczarzak, k"
+517,0.0,0.0,0.0,1.0,30.0,0.012295081967213116,29.0,34.0,0.20901639344262296,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,13.0,10.0,0.07049180327868854,5.0,1.0,0.027868852459016394,2.0,6.0,0.009836065573770493,0.0,0.0,0.0,0.0,11.0,0.0,2.0,0.0,0.009836065573770493,15.0,13.0,0.12704918032786885,5.0,12.0,0.045081967213114756,0.0,44.0,0.0,0.0,0.0,0.0,28.0,16.0,0.23770491803278687,29.0,50.0,0.2508196721311476,"ganchev, k"
+518,0.0,0.0,0.0,0.0,25.0,0.0,84.0,100.0,0.34240506329113923,0.0,0.0,0.0,0.0,0.0,0.0,18.0,171.0,0.04936708860759494,0.0,0.0,0.0,0.0,6.0,0.0,11.0,76.0,0.04177215189873418,9.0,24.0,0.04810126582278482,1.0,53.0,0.00379746835443038,0.0,0.0,0.0,3.0,46.0,0.00759493670886076,3.0,34.0,0.010126582278481013,23.0,57.0,0.07468354430379746,3.0,7.0,0.008860759493670885,42.0,44.0,0.15000000000000002,0.0,0.0,0.0,6.0,4.0,0.02278481012658228,60.0,86.0,0.24050632911392406,"baldridge, j"
+519,0.0,0.0,0.0,1.0,60.0,0.0017873100983020554,74.0,143.0,0.3333333333333333,0.0,0.0,0.0,0.0,0.0,0.0,6.0,30.0,0.021447721179624665,0.0,0.0,0.0,1.0,7.0,0.0017873100983020554,2.0,41.0,0.00804289544235925,3.0,81.0,0.010723860589812333,0.0,27.0,0.0,0.0,0.0,0.0,1.0,27.0,0.0035746201966041107,1.0,9.0,0.0035746201966041107,1.0,29.0,0.0035746201966041107,54.0,199.0,0.193923145665773,0.0,38.0,0.0,0.0,0.0,0.0,10.0,31.0,0.05361930294906166,83.0,43.0,0.3646112600536193,"goldwater, s"
+520,0.0,0.0,0.0,2.0,52.0,0.01665972511453561,19.0,49.0,0.13952519783423573,0.0,0.0,0.0,0.0,0.0,0.0,4.0,102.0,0.04997917534360683,0.0,0.0,0.0,1.0,85.0,0.006247396917950854,16.0,136.0,0.15368596418159103,0.0,0.0,0.0,0.0,63.0,0.0,0.0,0.0,0.0,4.0,1.0,0.022907122032486463,0.0,47.0,0.0,1.0,5.0,0.0049979175343606835,24.0,32.0,0.2613494377342774,0.0,37.0,0.0,0.0,0.0,0.0,33.0,38.0,0.15514369012911286,23.0,26.0,0.18950437317784255,"xu, p"
+521,0.0,0.0,0.0,0.0,7.0,0.0,3.0,6.0,0.018975332068311195,0.0,0.0,0.0,0.0,0.0,0.0,2.0,13.0,0.007590132827324478,0.0,0.0,0.0,0.0,2.0,0.0,0.0,19.0,0.0,22.0,11.0,0.17520556609740667,0.0,14.0,0.0,0.0,0.0,0.0,61.0,0.0,0.5161290322580645,14.0,12.0,0.08349146110056926,5.0,25.0,0.036685641998734975,2.0,94.0,0.018975332068311195,6.0,3.0,0.06325110689437065,0.0,0.0,0.0,0.0,36.0,0.0,5.0,23.0,0.07969639468690702,"hagiwara, m"
+522,0.0,0.0,0.0,1.0,11.0,0.012793176972281451,95.0,76.0,0.4968017057569297,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,19.0,10.0,0.06823027718550108,2.0,2.0,0.008528784648187633,0.0,4.0,0.0,0.0,0.0,0.0,2.0,5.0,0.008528784648187633,2.0,1.0,0.012793176972281451,18.0,12.0,0.0628997867803838,0.0,4.0,0.0,43.0,60.0,0.20788912579957358,0.0,0.0,0.0,13.0,0.0,0.07142857142857144,14.0,32.0,0.05010660980810235,"carreras, x"
+523,0.0,0.0,0.0,0.0,65.0,0.0,1.0,0.0,0.011085450346420323,0.0,0.0,0.0,0.0,0.0,0.0,0.0,67.0,0.0,0.0,0.0,0.0,0.0,45.0,0.0,0.0,11.0,0.0,0.0,0.0,0.0,4.0,17.0,0.04434180138568129,0.0,0.0,0.0,0.0,15.0,0.0,10.0,4.0,0.1182448036951501,2.0,4.0,0.013856812933025403,58.0,4.0,0.4600461893764434,22.0,14.0,0.274364896073903,0.0,0.0,0.0,9.0,0.0,0.07806004618937643,0.0,0.0,0.0,"lai, t"
+524,0.0,0.0,0.0,0.0,1.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,43.0,0.012048192771084338,0.0,0.0,0.0,1.0,27.0,0.004016064257028112,0.0,10.0,0.0,0.0,19.0,0.0,1.0,13.0,0.01606425702811245,0.0,0.0,0.0,0.0,24.0,0.0,56.0,71.0,0.3855421686746988,7.0,91.0,0.11244979919678715,0.0,138.0,0.0,0.0,24.0,0.0,0.0,0.0,0.0,57.0,21.0,0.43775100401606426,2.0,5.0,0.0321285140562249,"madnani, n"
+525,0.0,0.0,0.0,4.0,3.0,0.011116812040362576,20.0,12.0,0.06145126655644869,0.0,0.0,0.0,0.0,0.0,0.0,6.0,7.0,0.019145620736179994,0.0,0.0,0.0,0.0,3.0,0.0,1.0,32.0,0.005558406020181288,3.0,0.0,0.012043213043726124,0.0,1.0,0.0,0.0,0.0,0.0,33.0,54.0,0.10097770936662673,3.0,2.0,0.007411208026908384,9.0,7.0,0.030571233110997086,58.0,116.0,0.2384864982992227,9.0,0.0,0.020095775611424657,0.0,0.0,0.0,99.0,86.0,0.32639007658248287,42.0,30.0,0.16675218060543864,"khudanpur, s"
+526,0.0,0.0,0.0,0.0,3.0,0.0,38.0,48.0,0.12299465240641709,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,1.0,0.0,0.0007639419404125285,0.0,0.0,0.0,0.0,0.0,0.0,6.0,8.0,0.009931245225362872,2.0,1.0,0.001527883880825057,5.0,0.0,0.008403361344537815,16.0,19.0,0.05003819709702062,0.0,0.0,0.0,0.0,0.0,0.0,211.0,114.0,0.7750190985485101,8.0,1.0,0.03132161955691367,"byrne, w"
+527,0.0,0.0,0.0,0.0,34.0,0.0,4.0,7.0,0.04678362573099415,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,2.0,0.0,0.017543859649122806,0.0,0.0,0.0,0.0,0.0,0.0,2.0,8.0,0.02046783625730994,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,7.0,0.0,0.06725146198830409,0.0,0.0,0.0,76.0,86.0,0.7076023391812865,14.0,30.0,0.14035087719298245,"setiawan, h"
+528,0.0,0.0,0.0,0.0,30.0,0.0,16.0,0.0,0.04733178654292344,0.0,0.0,0.0,0.0,0.0,0.0,12.0,59.0,0.0419953596287703,0.0,0.0,0.0,19.0,21.0,0.05336426914153132,1.0,31.0,0.0027842227378190258,22.0,1.0,0.06473317865429236,5.0,2.0,0.017401392111368912,0.0,0.0,0.0,111.0,13.0,0.4088167053364269,2.0,25.0,0.0055684454756380515,10.0,9.0,0.03271461716937355,10.0,11.0,0.034802784222737825,15.0,4.0,0.06728538283062645,0.0,0.0,0.0,25.0,3.0,0.08236658932714618,37.0,6.0,0.14083526682134573,"niu, c"
+529,0.0,0.0,0.0,0.0,29.0,0.0,10.0,5.0,0.08119689693387515,0.0,0.0,0.0,0.0,0.0,0.0,0.0,32.0,0.0,0.0,0.0,0.0,12.0,31.0,0.09309198374584411,0.0,41.0,0.0,6.0,13.0,0.0744735869966753,0.0,2.0,0.0,0.0,0.0,0.0,14.0,8.0,0.17997783524196528,1.0,81.0,0.004432951606944958,5.0,14.0,0.032065016623568525,27.0,7.0,0.1799778352419653,0.0,26.0,0.0,0.0,0.0,0.0,31.0,24.0,0.2772072404876247,5.0,32.0,0.07757665312153676,"weischedel, r"
+530,0.0,0.0,0.0,0.0,135.0,0.0,0.0,29.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.0,189.0,0.06432748538011696,0.0,0.0,0.0,1.0,48.0,0.008771929824561403,10.0,77.0,0.08771929824561403,11.0,27.0,0.0935672514619883,0.0,33.0,0.0,0.0,0.0,0.0,13.0,87.0,0.10526315789473684,25.0,34.0,0.21929824561403508,12.0,33.0,0.10526315789473684,0.0,45.0,0.0,21.0,214.0,0.16666666666666666,0.0,0.0,0.0,0.0,106.0,0.0,17.0,37.0,0.14912280701754385,"moens, m"
+531,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,12.0,10.0,0.09190120620333142,0.0,0.0,0.0,0.0,9.0,0.0,0.0,4.0,0.0,10.0,1.0,0.03733486502010339,3.0,17.0,0.02106069308826345,0.0,0.0,0.0,75.0,69.0,0.3460654796094199,4.0,0.0,0.01340225923798583,20.0,2.0,0.11296189929159486,53.0,23.0,0.26574765460463334,8.0,7.0,0.013880911353628183,0.0,0.0,0.0,17.0,60.0,0.09764503159103964,0.0,4.0,0.0,"oard, d"
+532,0.0,0.0,0.0,0.0,3.0,0.0,58.0,139.0,0.3514357053682896,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.006242197253433207,0.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,17.0,0.0,3.0,4.0,0.010923845193508112,42.0,96.0,0.2016229712858926,11.0,2.0,0.06054931335830212,75.0,0.0,0.30805243445692887,0.0,0.0,0.0,3.0,14.0,0.014357053682896376,5.0,0.0,0.04681647940074906,"kulick, s"
+533,0.0,0.0,0.0,0.0,18.0,0.0,50.0,83.0,0.14478764478764478,0.0,0.0,0.0,0.0,0.0,0.0,5.0,4.0,0.015444015444015444,0.0,0.0,0.0,0.0,4.0,0.0,0.0,56.0,0.0,26.0,69.0,0.10231660231660232,0.0,0.0,0.0,0.0,0.0,0.0,2.0,15.0,0.007722007722007722,4.0,36.0,0.013513513513513514,8.0,50.0,0.03474903474903475,5.0,10.0,0.01673101673101673,150.0,60.0,0.5585585585585586,0.0,0.0,0.0,12.0,36.0,0.0444015444015444,22.0,9.0,0.06177606177606178,"merlo, p"
+534,0.0,0.0,0.0,1.0,1.0,0.004651162790697674,86.0,215.0,0.7255813953488373,0.0,0.0,0.0,0.0,0.0,0.0,4.0,12.0,0.037209302325581395,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,5.0,41.0,0.03255813953488372,1.0,0.0,0.004651162790697674,0.0,0.0,0.0,0.0,11.0,0.0,0.0,0.0,0.0,2.0,37.0,0.018604651162790697,0.0,2.0,0.0,19.0,28.0,0.17674418604651163,0.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,"kahane, s"
+535,0.0,0.0,0.0,0.0,39.0,0.0,1.0,15.0,0.0023734177215189874,0.0,0.0,0.0,0.0,0.0,0.0,14.0,61.0,0.044303797468354424,0.0,0.0,0.0,46.0,30.0,0.21835443037974683,0.0,12.0,0.0,0.0,23.0,0.0,0.0,379.0,0.0,0.0,0.0,0.0,76.0,32.0,0.31091772151898733,35.0,134.0,0.14556962025316456,0.0,31.0,0.0,8.0,6.0,0.022943037974683545,0.0,18.0,0.0,0.0,0.0,0.0,45.0,0.0,0.1305379746835443,50.0,17.0,0.125,"varma, v"
+536,0.0,0.0,0.0,0.0,5.0,0.0,42.0,80.0,0.36122102882984736,0.0,0.0,0.0,0.0,0.0,0.0,4.0,31.0,0.022611644997173542,0.0,0.0,0.0,3.0,1.0,0.05087620124364047,1.0,0.0,0.0056529112492933855,10.0,28.0,0.0847936687394008,0.0,7.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,36.0,110.0,0.285472018089316,1.0,12.0,0.008479366873940079,21.0,75.0,0.18089315997738833,0.0,0.0,0.0,0.0,3.0,0.0,0.0,0.0,0.0,"bender, e"
+537,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0025553662691652473,0.0,0.0,0.0,0.0,0.0,0.0,37.0,12.0,0.24914821124361158,0.0,0.0,0.0,29.0,7.0,0.1788756388415673,0.0,0.0,0.0,24.0,24.0,0.14310051107325383,0.0,10.0,0.0,0.0,0.0,0.0,1.0,8.0,0.006388415672913117,8.0,0.0,0.03620102214650766,8.0,8.0,0.040885860306643956,17.0,27.0,0.07197614991482112,34.0,29.0,0.23253833049403752,0.0,0.0,0.0,6.0,0.0,0.03833049403747871,0.0,0.0,0.0,"saint-dizier, p"
+538,0.0,0.0,0.0,0.0,25.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,59.0,236.0,0.5648267008985881,0.0,0.0,0.0,6.0,1.0,0.030808729139922983,0.0,1.0,0.0,0.0,38.0,0.0,0.0,30.0,0.0,0.0,0.0,0.0,0.0,15.0,0.0,1.0,22.0,0.003851091142490373,1.0,24.0,0.007702182284980746,7.0,64.0,0.05391527599486522,13.0,21.0,0.05006418485237485,0.0,0.0,0.0,6.0,12.0,0.046213093709884474,28.0,41.0,0.24261874197689348,"williams, j"
+539,0.0,0.0,0.0,8.0,16.0,0.04555808656036446,1.0,9.0,0.004555808656036446,0.0,0.0,0.0,0.0,0.0,0.0,5.0,17.0,0.04100227790432802,0.0,0.0,0.0,1.0,3.0,0.013667425968109338,0.0,8.0,0.0,20.0,83.0,0.23917995444191342,9.0,122.0,0.04100227790432802,0.0,0.0,0.0,5.0,17.0,0.02277904328018223,8.0,27.0,0.06605922551252846,33.0,125.0,0.18678815489749429,0.0,31.0,0.0,44.0,383.0,0.3348519362186788,0.0,0.0,0.0,1.0,18.0,0.004555808656036446,0.0,5.0,0.0,"stede, m"
+540,0.0,0.0,0.0,2.0,189.0,0.02279826496321189,15.0,76.0,0.10715184532709589,0.0,0.0,0.0,0.0,0.0,0.0,15.0,278.0,0.0694458837288487,0.0,0.0,0.0,14.0,139.0,0.15958785474248324,0.0,135.0,0.0,0.0,59.0,0.0,0.0,402.0,0.0,0.0,0.0,0.0,19.0,53.0,0.21658351715051297,5.0,31.0,0.056995662408029724,28.0,42.0,0.15825548860826955,0.0,58.0,0.0,16.0,70.0,0.11961687071607277,0.0,0.0,0.0,6.0,143.0,0.05536721491065745,3.0,41.0,0.03419739744481784,"kumar, a"
+541,0.0,0.0,0.0,0.0,18.0,0.0,63.0,3.0,0.5057471264367817,0.0,0.0,0.0,0.0,0.0,0.0,0.0,35.0,0.0,0.0,0.0,0.0,5.0,38.0,0.05747126436781609,0.0,0.0,0.0,3.0,55.0,0.022988505747126436,0.0,4.0,0.0,0.0,0.0,0.0,6.0,57.0,0.04597701149425287,0.0,11.0,0.0,3.0,11.0,0.026819923371647507,0.0,5.0,0.0,35.0,16.0,0.34099616858237547,0.0,0.0,0.0,0.0,16.0,0.0,0.0,0.0,0.0,"thater, s"
+542,0.0,0.0,0.0,1.0,17.0,0.0026951151038742283,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,135.0,441.0,0.7359910162829871,0.0,0.0,0.0,0.0,26.0,0.0,2.0,6.0,0.01347557551937114,0.0,5.0,0.0,0.0,61.0,0.0,0.0,0.0,0.0,0.0,19.0,0.0,3.0,143.0,0.02021336327905671,9.0,28.0,0.02605277933745087,2.0,11.0,0.008422234699606963,0.0,10.0,0.0,0.0,0.0,0.0,1.0,14.0,0.00673778775968557,30.0,48.0,0.18641212801796744,"rieser, v"
+543,0.0,0.0,0.0,0.0,18.0,0.0,1.0,1.0,0.0052770448548812654,0.0,0.0,0.0,0.0,0.0,0.0,100.0,210.0,0.3887423043095865,0.0,0.0,0.0,6.0,10.0,0.02638522427440633,0.0,1.0,0.0,15.0,2.0,0.07915567282321899,0.0,10.0,0.0,0.0,0.0,0.0,3.0,4.0,0.031662269129287594,76.0,93.0,0.34212840809146877,8.0,40.0,0.042216358839050123,6.0,9.0,0.021108179419525062,9.0,7.0,0.02638522427440633,0.0,0.0,0.0,8.0,11.0,0.03693931398416886,0.0,15.0,0.0,"reiter, e"
+544,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.00570148645896966,0.0,0.0,0.0,0.0,0.0,0.0,133.0,83.0,0.8227448584809611,0.0,0.0,0.0,1.0,0.0,0.00570148645896966,0.0,2.0,0.0,0.0,0.0,0.0,3.0,1.0,0.01026267562614539,0.0,0.0,0.0,4.0,0.0,0.013683567501527187,6.0,4.0,0.02736713500305437,8.0,1.0,0.04398289554062309,6.0,5.0,0.034208918753817964,8.0,0.0,0.034208918753817964,0.0,0.0,0.0,1.0,3.0,0.0021380574221136228,0.0,8.0,0.0,"cassell, j"
+545,0.0,0.0,0.0,7.0,33.0,0.05002977963073258,1.0,0.0,0.0017867778439547351,0.0,0.0,0.0,0.0,0.0,0.0,229.0,286.0,0.6594911937377691,0.0,0.0,0.0,12.0,22.0,0.03692674210839785,0.0,7.0,0.0,3.0,17.0,0.0071471113758189405,0.0,2.0,0.0,0.0,0.0,0.0,1.0,8.0,0.001021015910831277,14.0,30.0,0.04764740917212627,0.0,6.0,0.0,15.0,24.0,0.048243001786777845,48.0,123.0,0.1405598570577725,0.0,0.0,0.0,0.0,3.0,0.0,3.0,21.0,0.0071471113758189405,"chai, j"
+546,0.0,0.0,0.0,0.0,11.0,0.0,15.0,0.0,0.043458083336095736,0.0,0.0,0.0,0.0,0.0,0.0,164.0,114.0,0.5531209599893924,0.0,0.0,0.0,21.0,23.0,0.0652036994066364,3.0,8.0,0.005966784897404449,2.0,6.0,0.005568999237577485,1.0,0.0,0.0027844996187887426,0.0,0.0,0.0,13.0,9.0,0.04524811880531707,0.0,2.0,0.0,0.0,3.0,0.0,62.0,46.0,0.23200848609407632,14.0,25.0,0.03967911956773958,0.0,0.0,0.0,1.0,3.0,0.0034806245234859285,1.0,17.0,0.0034806245234859285,"komatani, k"
+547,0.0,0.0,0.0,0.0,4.0,0.0,11.0,19.0,0.08702092205147195,0.0,0.0,0.0,0.0,0.0,0.0,78.0,69.0,0.4973153119792631,0.0,0.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,9.0,14.0,0.024995371227550453,0.0,0.0,0.0,0.0,0.0,0.0,6.0,4.0,0.06109979633401222,4.0,16.0,0.020366598778004074,10.0,4.0,0.08331790409183484,3.0,9.0,0.011109053878911312,51.0,37.0,0.1814478800222181,0.0,0.0,0.0,0.0,1.0,0.0,6.0,2.0,0.03332716163673394,"stone, m"
+548,0.0,0.0,0.0,1.0,6.0,0.012277470841006752,4.0,4.0,0.03273992224268467,0.0,0.0,0.0,0.0,0.0,0.0,118.0,475.0,0.5915694700225086,0.0,0.0,0.0,3.0,0.0,0.027624309392265192,4.0,0.0,0.024554941682013505,6.0,4.0,0.022099447513812154,3.0,4.0,0.01534683855125844,0.0,0.0,0.0,9.0,6.0,0.03171679967260077,19.0,45.0,0.09358161107700702,2.0,52.0,0.016369961121342334,17.0,68.0,0.048632426164654526,20.0,25.0,0.0738694495600573,0.0,0.0,0.0,3.0,0.0,0.00961735215878862,0.0,59.0,0.0,"traum, d"
+549,0.0,0.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,64.0,274.0,0.2822384428223844,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,6.0,14.0,0.029197080291970802,2.0,12.0,0.009732360097323601,7.0,54.0,0.10218978102189781,0.0,1.0,0.0,0.0,0.0,0.0,3.0,1.0,0.01338199513381995,66.0,143.0,0.5632603406326033,"georgila, k"
+550,0.0,0.0,0.0,9.0,153.0,0.12811387900355872,61.0,88.0,0.43416370106761565,0.0,0.0,0.0,0.0,0.0,0.0,4.0,38.0,0.021352313167259787,0.0,0.0,0.0,11.0,62.0,0.13523131672597866,0.0,98.0,0.0,3.0,49.0,0.021352313167259787,0.0,35.0,0.0,0.0,0.0,0.0,0.0,53.0,0.0,15.0,87.0,0.1423487544483986,7.0,44.0,0.046263345195729534,2.0,81.0,0.014234875444839857,4.0,75.0,0.014234875444839857,0.0,0.0,0.0,1.0,5.0,0.0071174377224199285,5.0,43.0,0.03558718861209965,"levy, r"
+551,0.0,0.0,0.0,3.0,25.0,0.008797653958944282,16.0,3.0,0.07038123167155426,0.0,0.0,0.0,0.0,0.0,0.0,5.0,6.0,0.015395894428152495,0.0,0.0,0.0,39.0,10.0,0.10557184750733138,0.0,9.0,0.0,38.0,0.0,0.13274682306940372,1.0,25.0,0.002932551319648094,0.0,0.0,0.0,17.0,0.0,0.07038123167155426,11.0,3.0,0.03665689149560118,5.0,7.0,0.019061583577712614,1.0,5.0,0.004398826979472141,126.0,160.0,0.42463343108504403,0.0,0.0,0.0,28.0,0.0,0.07062561094819159,10.0,1.0,0.038416422287390034,"moldovan, d"
+552,0.0,0.0,0.0,0.0,16.0,0.0,158.0,40.0,0.6017777777777777,0.0,0.0,0.0,0.0,0.0,0.0,20.0,25.0,0.13066666666666665,0.0,0.0,0.0,4.0,3.0,0.01185185185185185,0.0,3.0,0.0,4.0,72.0,0.007111111111111111,1.0,23.0,0.004444444444444444,0.0,0.0,0.0,11.0,4.0,0.02074074074074074,2.0,4.0,0.005925925925925925,41.0,20.0,0.15466666666666665,2.0,75.0,0.008888888888888887,2.0,1.0,0.0035555555555555553,0.0,0.0,0.0,13.0,187.0,0.04444444444444444,2.0,0.0,0.005925925925925925,"wintner, s"
+553,0.0,0.0,0.0,0.0,27.0,0.0,1.0,1.0,0.001190003966679889,0.0,0.0,0.0,0.0,0.0,0.0,25.0,9.0,0.04998016660055534,0.0,0.0,0.0,3.0,6.0,0.004760015866719556,0.0,0.0,0.0,219.0,113.0,0.4581515271717572,0.0,21.0,0.0,0.0,0.0,0.0,65.0,33.0,0.15470051566838558,20.0,12.0,0.04165013883379612,12.0,6.0,0.017453391511305042,8.0,42.0,0.01745339151130504,99.0,64.0,0.20904403014676717,0.0,0.0,0.0,19.0,15.0,0.03689012296707656,5.0,10.0,0.00872669575565252,"stevenson, s"
+554,0.0,0.0,0.0,3.0,22.0,0.024357239512855212,139.0,71.0,0.803788903924222,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,4.0,0.0,2.0,8.0,0.01623815967523681,15.0,17.0,0.048714479025710425,1.0,11.0,0.004059539918809202,0.0,0.0,0.0,4.0,12.0,0.02029769959404601,1.0,4.0,0.004059539918809202,3.0,12.0,0.012178619756427606,6.0,2.0,0.017591339648173207,9.0,0.0,0.018267929634641408,0.0,0.0,0.0,0.0,221.0,0.0,8.0,39.0,0.030446549391069014,"schmid, h"
+555,0.0,0.0,0.0,4.0,23.0,0.028060798396525807,44.0,3.0,0.28996158343076667,0.0,0.0,0.0,0.0,0.0,0.0,47.0,74.0,0.19625856021379656,0.0,0.0,0.0,9.0,36.0,0.05612159679305161,0.0,2.0,0.0,2.0,0.0,0.014030399198262903,13.0,63.0,0.06079839652580591,0.0,0.0,0.0,9.0,37.0,0.05845999665942876,10.0,44.0,0.03507599799565726,26.0,53.0,0.12861199265074327,10.0,14.0,0.05545348254551529,3.0,68.0,0.021045598797394358,0.0,0.0,0.0,1.0,0.0,0.004676799732754301,11.0,25.0,0.05144479706029731,"ros{\'e}, c"
+556,0.0,0.0,0.0,1.0,0.0,0.00750938673341677,18.0,13.0,0.11389236545682101,0.0,0.0,0.0,0.0,0.0,0.0,10.0,115.0,0.0750938673341677,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,19.0,9.0,0.12891113892365452,1.0,0.0,0.00750938673341677,0.0,0.0,0.0,4.0,0.0,0.03003754693366708,3.0,3.0,0.02252816020025031,20.0,2.0,0.09386733416770962,1.0,6.0,0.00750938673341677,69.0,17.0,0.49561952440550683,0.0,0.0,0.0,2.0,0.0,0.010012515644555693,1.0,10.0,0.00750938673341677,"lascarides, a"
+557,0.0,0.0,0.0,1.0,9.0,0.0017379885678974203,3.0,10.0,0.006372624748957207,0.0,0.0,0.0,0.0,0.0,0.0,106.0,120.0,0.3398733199443844,0.0,0.0,0.0,0.0,10.0,0.0,0.0,2.0,0.0,4.0,25.0,0.014483238065811832,2.0,136.0,0.004634636181059787,0.0,0.0,0.0,22.0,34.0,0.06395797929862505,25.0,17.0,0.0675884443071219,26.0,46.0,0.06140892939904217,140.0,124.0,0.3668314537308822,7.0,22.0,0.01807508110613317,0.0,0.0,0.0,3.0,20.0,0.0063726247489572065,15.0,2.0,0.048663679901127764,"hirschberg, j"
+558,0.0,0.0,0.0,0.0,0.0,0.0,36.0,15.0,0.16907879623248334,0.0,0.0,0.0,0.0,0.0,0.0,8.0,6.0,0.04135079255685734,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.004594532506317482,2.0,4.0,0.010337698139214336,1.0,0.0,0.0034458993797381117,0.0,0.0,0.0,18.0,0.0,0.11164713990351481,1.0,0.0,0.004594532506317482,4.0,0.0,0.02343211578221916,6.0,2.0,0.01722949689869056,31.0,55.0,0.1447277739490007,0.0,0.0,0.0,52.0,14.0,0.24282104295887896,53.0,11.0,0.22674017918676775,"shimazu, a"
+559,0.0,0.0,0.0,0.0,236.0,0.0,4.0,68.0,0.02857142857142857,0.0,0.0,0.0,0.0,0.0,0.0,4.0,96.0,0.02857142857142857,0.0,0.0,0.0,0.0,110.0,0.0,0.0,141.0,0.0,0.0,20.0,0.0,1.0,15.0,0.007142857142857143,0.0,0.0,0.0,11.0,72.0,0.07857142857142857,0.0,39.0,0.0,11.0,26.0,0.07857142857142857,0.0,57.0,0.0,0.0,122.0,0.0,0.0,0.0,0.0,74.0,416.0,0.4119047619047619,55.0,136.0,0.36666666666666664,"haffari, g"
+560,0.0,0.0,0.0,0.0,4.0,0.0,98.0,116.0,0.8672000000000001,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,10.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,17.0,0.0,4.0,54.0,0.0432,6.0,19.0,0.0576,0.0,0.0,0.0,0.0,0.0,0.0,2.0,70.0,0.032,0.0,6.0,0.0,"loftsson, h"
+561,0.0,0.0,0.0,8.0,6.0,0.06582556226001096,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,49.0,123.0,0.18431157432803072,0.0,0.0,0.0,0.0,20.0,0.0,0.0,0.0,0.0,14.0,10.0,0.1124520021941854,0.0,7.0,0.0,0.0,0.0,0.0,4.0,41.0,0.013165112452002194,50.0,56.0,0.35216675809105863,11.0,4.0,0.0822819528250137,14.0,1.0,0.09051014810751508,20.0,9.0,0.09928688974218321,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,"mccoy, k"
+562,0.0,0.0,0.0,0.0,33.0,0.0,2.0,0.0,0.01875,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,0.0,0.0,0.0,0.0,17.0,0.0,0.0,21.0,0.0,20.0,22.0,0.16874999999999998,3.0,57.0,0.028124999999999997,0.0,0.0,0.0,44.0,48.0,0.39062499999999994,26.0,18.0,0.24375,0.0,6.0,0.0,0.0,6.0,0.0,2.0,11.0,0.01875,0.0,0.0,0.0,4.0,0.0,0.0375,10.0,14.0,0.09375,"fukumoto, f"
+563,0.0,0.0,0.0,1.0,40.0,0.0018264363617387673,13.0,1.0,0.020090799979126443,0.0,0.0,0.0,0.0,0.0,0.0,351.0,158.0,0.5494094522430376,0.0,0.0,0.0,3.0,3.0,0.0036528727234775345,4.0,23.0,0.004870496964636713,3.0,4.0,0.005479309085216302,24.0,51.0,0.040181599958252885,0.0,0.0,0.0,9.0,28.0,0.011993598775417905,51.0,116.0,0.09040859990606899,13.0,55.0,0.021612830280575413,96.0,19.0,0.133764720207344,26.0,56.0,0.05114021812868549,0.0,0.0,0.0,3.0,3.0,0.0036528727234775345,42.0,32.0,0.06191619266294421,"litman, d"
+564,0.0,0.0,0.0,0.0,2.0,0.0,133.0,266.0,0.8243243243243243,0.0,0.0,0.0,0.0,0.0,0.0,7.0,4.0,0.036036036036036036,0.0,0.0,0.0,0.0,0.0,0.0,3.0,2.0,0.013513513513513514,0.0,15.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.013513513513513514,0.0,29.0,0.0,0.0,0.0,0.0,5.0,15.0,0.03153153153153153,1.0,4.0,0.0045045045045045045,0.0,0.0,0.0,4.0,15.0,0.03153153153153153,7.0,2.0,0.04504504504504504,"nederhof, m"
+565,0.0,0.0,0.0,0.0,29.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.005847953216374269,2.0,21.0,0.017543859649122806,5.0,0.0,0.029239766081871343,1.0,0.0,0.005847953216374269,0.0,0.0,0.0,1.0,4.0,0.005847953216374269,0.0,0.0,0.0,3.0,7.0,0.017543859649122806,127.0,155.0,0.8918128654970761,0.0,0.0,0.0,0.0,0.0,0.0,4.0,6.0,0.02631578947368421,0.0,4.0,0.0,"hung, j"
+566,0.0,0.0,0.0,0.0,11.0,0.0,17.0,118.0,0.16818181818181818,0.0,0.0,0.0,0.0,0.0,0.0,0.0,10.0,0.0,0.0,0.0,0.0,3.0,2.0,0.05454545454545454,0.0,95.0,0.0,26.0,73.0,0.2712121212121212,0.0,23.0,0.0,0.0,0.0,0.0,0.0,5.0,0.0,6.0,13.0,0.05454545454545454,4.0,41.0,0.04090909090909091,1.0,59.0,0.00909090909090909,1.0,12.0,0.00909090909090909,0.0,0.0,0.0,49.0,251.0,0.3924242424242424,0.0,5.0,0.0,"stymne, s"
+567,0.0,0.0,0.0,0.0,24.0,0.0,1.0,10.0,0.0056603773584905665,0.0,0.0,0.0,0.0,0.0,0.0,6.0,17.0,0.033962264150943396,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,55.0,2.0,0.29056603773584905,0.0,1.0,0.0,0.0,0.0,0.0,1.0,6.0,0.0056603773584905665,15.0,0.0,0.0849056603773585,6.0,5.0,0.033962264150943396,4.0,0.0,0.022641509433962266,43.0,2.0,0.1792452830188679,0.0,0.0,0.0,44.0,76.0,0.2490566037735849,23.0,14.0,0.09433962264150943,"venkatapathy, s"
+568,0.0,0.0,0.0,26.0,64.0,0.11148365465213747,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,70.0,86.0,0.3042749371332774,0.0,0.0,0.0,5.0,37.0,0.020117351215423303,0.0,15.0,0.0,10.0,2.0,0.04191114836546521,1.0,12.0,0.005029337803855826,0.0,0.0,0.0,27.0,21.0,0.11735121542330258,6.0,18.0,0.030176026823134954,3.0,19.0,0.015088013411567477,0.0,30.0,0.0,62.0,12.0,0.3260687342833194,0.0,0.0,0.0,2.0,0.0,0.010058675607711651,5.0,27.0,0.018440905280804692,"zukerman, i"
+569,0.0,0.0,0.0,2.0,0.0,0.021613832853025934,25.0,34.0,0.22838616714697402,0.0,0.0,0.0,0.0,0.0,0.0,13.0,0.0,0.14048991354466858,0.0,0.0,0.0,2.0,2.0,0.010806916426512967,0.0,4.0,0.0,10.0,0.0,0.1015850144092219,0.0,2.0,0.0,0.0,0.0,0.0,3.0,41.0,0.01621037463976945,5.0,5.0,0.05403458213256483,9.0,37.0,0.06159942363112391,2.0,16.0,0.021613832853025934,9.0,4.0,0.08861671469740633,0.0,0.0,0.0,14.0,0.0,0.11887608069164264,20.0,53.0,0.1361671469740634,"ringger, e"
+570,0.0,0.0,0.0,4.0,38.0,0.04173913043478261,58.0,99.0,0.3321739130434783,0.0,0.0,0.0,0.0,0.0,0.0,10.0,33.0,0.04,0.0,0.0,0.0,0.0,5.0,0.0,6.0,5.0,0.04,2.0,7.0,0.008695652173913044,0.0,56.0,0.0,0.0,0.0,0.0,10.0,35.0,0.04173913043478261,11.0,82.0,0.05739130434782609,6.0,18.0,0.04173913043478261,52.0,36.0,0.21217391304347827,16.0,37.0,0.1391304347826087,0.0,0.0,0.0,4.0,8.0,0.017391304347826087,5.0,50.0,0.02782608695652174,"penn, g"
+571,0.0,0.0,0.0,0.0,7.0,0.0,109.0,147.0,0.5270632368703109,0.0,0.0,0.0,0.0,0.0,0.0,73.0,41.0,0.2757234726688103,0.0,0.0,0.0,0.0,9.0,0.0,0.0,1.0,0.0,20.0,20.0,0.09458735262593783,0.0,9.0,0.0,0.0,0.0,0.0,3.0,0.0,0.009646302250803861,1.0,3.0,0.0032154340836012866,9.0,6.0,0.031350482315112546,5.0,32.0,0.02116827438370847,0.0,50.0,0.0,0.0,0.0,0.0,8.0,19.0,0.037245444801714905,0.0,0.0,0.0,"matsubara, s"
+572,0.0,0.0,0.0,0.0,3.0,0.0,128.0,89.0,0.3776754318608926,0.0,0.0,0.0,0.0,0.0,0.0,14.0,63.0,0.0375205619772429,0.0,0.0,0.0,7.0,1.0,0.006530753118533565,10.0,15.0,0.02490393855867466,11.0,5.0,0.03584077311451221,1.0,26.0,0.002263994414424969,0.0,0.0,0.0,9.0,31.0,0.013789784160588448,14.0,64.0,0.04527988828849938,7.0,13.0,0.01811195531539975,61.0,23.0,0.1616492011899428,34.0,5.0,0.14263164810877305,0.0,0.0,0.0,15.0,7.0,0.04211029610830442,29.0,23.0,0.09169177378421126,"charniak, e"
+573,0.0,0.0,0.0,0.0,22.0,0.0,0.0,4.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,13.0,0.0025850926324859974,0.0,0.0,0.0,2.0,1.0,0.00861697544161999,0.0,14.0,0.0,91.0,166.0,0.42654028436018954,0.0,56.0,0.0,0.0,0.0,0.0,1.0,7.0,0.006462731581214993,0.0,6.0,0.0,1.0,11.0,0.004308487720809995,33.0,30.0,0.33175355450236965,27.0,61.0,0.1809564842740198,0.0,0.0,0.0,1.0,7.0,0.004308487720809995,6.0,3.0,0.03446790176647996,"chung, s"
+574,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,5.0,9.0,0.02034587995930824,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,16.0,2.0,0.09359104781281789,1.0,6.0,0.01017293997965412,0.0,0.0,0.0,17.0,1.0,0.14649033570701933,6.0,30.0,0.030518819938962362,20.0,69.0,0.10579857578840285,1.0,7.0,0.00508646998982706,0.0,1.0,0.0,0.0,0.0,0.0,74.0,98.0,0.5879959308240081,0.0,0.0,0.0,"kumaran, a"
+575,0.0,0.0,0.0,0.0,78.0,0.0,156.0,109.0,0.30506084421346175,0.0,0.0,0.0,0.0,0.0,0.0,4.0,36.0,0.007415388515654708,0.0,0.0,0.0,0.0,5.0,0.0,6.0,45.0,0.009063252630244644,31.0,12.0,0.05685131195335276,3.0,11.0,0.004325643300798579,0.0,0.0,0.0,10.0,12.0,0.01853847128913677,6.0,12.0,0.009681201673215869,21.0,12.0,0.03831284066421599,44.0,20.0,0.08156927367220179,84.0,12.0,0.13461782228419317,0.0,0.0,0.0,108.0,438.0,0.14238179743947266,109.0,88.0,0.19218215236405117,"sarkar, a"
+576,0.0,0.0,0.0,0.0,0.0,0.0,4.0,52.0,0.030136986301369864,0.0,0.0,0.0,0.0,0.0,0.0,5.0,25.0,0.04109589041095891,0.0,0.0,0.0,7.0,6.0,0.05753424657534247,14.0,5.0,0.08219178082191782,15.0,32.0,0.09589041095890413,2.0,42.0,0.010958904109589041,0.0,0.0,0.0,19.0,15.0,0.14794520547945206,3.0,1.0,0.01917808219178082,4.0,10.0,0.03287671232876713,20.0,12.0,0.10410958904109589,37.0,31.0,0.24657534246575344,0.0,0.0,0.0,6.0,14.0,0.04931506849315069,11.0,32.0,0.08219178082191782,"bergsma, s"
+577,0.0,0.0,0.0,5.0,15.0,0.022918918918918924,12.0,140.0,0.03783783783783784,0.0,0.0,0.0,0.0,0.0,0.0,2.0,11.0,0.006486486486486487,0.0,0.0,0.0,1.0,9.0,0.002162162162162162,0.0,17.0,0.0,14.0,96.0,0.03135135135135135,1.0,12.0,0.0032432432432432435,0.0,0.0,0.0,42.0,76.0,0.1718918918918919,3.0,11.0,0.0075675675675675675,10.0,17.0,0.02918918918918919,60.0,95.0,0.17427027027027028,3.0,12.0,0.016216216216216217,0.0,0.0,0.0,84.0,106.0,0.33924324324324323,61.0,17.0,0.15762162162162163,"kondrak, g"
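The rows above come from one of the per-author CSV tables added in this commit; the file's header row is not visible in this excerpt. Judging by the `capital_measures.csv` header further down in the diff, the numeric columns appear to come in (start, end, expertise) triples, one per topic, with the author name in the last column. The sketch below loads the table under that assumption; the path and the `expertise_` column prefix are assumptions, not confirmed by the diff.

```python
# Minimal sketch for loading the per-author rows shown above.
# Assumptions (not confirmed by this excerpt): the file is the per-author
# aggregate table of this commit, it has a header row, and its numeric columns
# follow the (start_k, end_k, expertise_k) pattern seen in capital_measures.csv.
import pandas as pd

df = pd.read_csv("output/acl_2002_2022/aggregate.csv")  # assumed path

expertise_cols = [c for c in df.columns if c.startswith("expertise_")]
if expertise_cols:
    # Example: authors whose largest single expertise share exceeds 0.5,
    # i.e. authors concentrated on one research area.
    focused = df.loc[df[expertise_cols].max(axis=1) > 0.5]
    print(focused.iloc[:, -1].head())  # last column holds the author name
```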

File diff suppressed because it is too large
+ 58290 - 0
output/acl_2002_2022/articles.csv


+ 1 - 0
output/acl_2002_2022/authors_full_records.pickle

@@ -0,0 +1 @@
+../../.git/annex/objects/wV/J8/MD5E-s296079362--59b159e18d2ee633e45f54d91dd52455/MD5E-s296079362--59b159e18d2ee633e45f54d91dd52455
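The large binary outputs in this commit are tracked with git-annex: the committed file is only a pointer into `.git/annex/objects`, and the object name encodes the annex key. A minimal sketch of pulling the file size and checksum out of such a key, assuming git-annex's standard `MD5E-s<bytes>--<md5><extension>` layout.

```python
# Minimal sketch: extract size and checksum from a git-annex MD5E key,
# e.g. "MD5E-s296079362--59b159e18d2ee633e45f54d91dd52455".
# Assumes the standard layout MD5E-s<size-in-bytes>--<md5><optional extension>.
import re

def parse_md5e_key(key: str):
    m = re.match(r"MD5E-s(?P<size>\d+)--(?P<md5>[0-9a-f]{32})(?P<ext>\..*)?$", key)
    if m is None:
        raise ValueError(f"not an MD5E key: {key}")
    return int(m.group("size")), m.group("md5"), m.group("ext") or ""

size, md5, ext = parse_md5e_key("MD5E-s296079362--59b159e18d2ee633e45f54d91dd52455")
print(f"{size / 1e6:.1f} MB, md5={md5}, ext={ext or '(none)'}")
```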

File diff suppressed because it is too large
+ 10825 - 0
output/acl_2002_2022/brokerage.csv


File diff suppressed because it is too large
+ 13474 - 0
output/acl_2002_2022/cost_vs_nu_knowledge.eps


+ 1 - 0
output/acl_2002_2022/dataset.pickle

@@ -0,0 +1 @@
+../../.git/annex/objects/Z6/21/MD5E-s37618571--7d4899ae27ce960b6c81fe267ee5a736/MD5E-s37618571--7d4899ae27ce960b6c81fe267ee5a736

+ 1 - 0
output/acl_2002_2022/ei_samples_control_nu.npz

@@ -0,0 +1 @@
+../../.git/annex/objects/5v/7G/MD5E-s800809813--6d8467963be8c83f01971467b96708a4.npz/MD5E-s800809813--6d8467963be8c83f01971467b96708a4.npz

+ 1 - 0
output/acl_2002_2022/embeddings.bin

@@ -0,0 +1 @@
+../../.git/annex/objects/PV/z3/MD5E-s3699310--c6bbbb6dfdc244d8a91d8ab9f2bc6337.bin/MD5E-s3699310--c6bbbb6dfdc244d8a91d8ab9f2bc6337.bin

+ 1 - 0
output/acl_2002_2022/etm_instance.pickle

@@ -0,0 +1 @@
+../../.git/annex/objects/fv/w9/MD5E-s105964159--00635c343eac76e253622afced1ff32f/MD5E-s105964159--00635c343eac76e253622afced1ff32f

+ 1 - 0
output/acl_2002_2022/keywords.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/Jw/pK/MD5E-s21955008--42972d707477818e8876b11b36024c53.npy/MD5E-s21955008--42972d707477818e8876b11b36024c53.npy

+ 20 - 0
output/acl_2002_2022/largest_transfers.tex

@@ -0,0 +1,20 @@
+\begin{table}
+\caption{Largest transfers across research areas.}
+\label{table:largest_transfers}
+\begin{tabular}{b{0.4\textwidth}|b{0.4\textwidth}|c}
+\toprule
+Origin research area & Target research area & Magnitude \\ \hline
+\midrule
+Grammar & Translation & 1.76 \\ \hline
+Translation & Deep learning & 1.57 \\ \hline
+Speech & Deep learning & 1.55 \\ \hline
+Translation & Dialogues, generation, interfaces & 1.29 \\ \hline
+Clustering & Sentiment detection & 1.14 \\ \hline
+Translation & Corpora and pre-trained models & 1.10 \\ \hline
+Speech & Dialogues, generation, interfaces & 1.07 \\ \hline
+Statistical modelling & Corpora and pre-trained models & 0.95 \\ \hline
+Translation & Speech & 0.95 \\ \hline
+Grammar & Deep learning & 0.93 \\ \hline
+\bottomrule
+\end{tabular}
+\end{table}

+ 1 - 0
output/acl_2002_2022/model

@@ -0,0 +1 @@
+../../.git/annex/objects/Qz/ZF/MD5E-s22987863--5b8f9dd52e4d89570ebe23030ac71c16/MD5E-s22987863--5b8f9dd52e4d89570ebe23030ac71c16

+ 20 - 0
output/acl_2002_2022/most_conservative.tex

@@ -0,0 +1,20 @@
+\begin{table}
+\caption{Most conservative research areas.}
+\label{table:most_conservative}
+\begin{tabular}{b{0.4\textwidth}|c}
+\toprule
+Research area & Conservatism \\ \hline
+\midrule
+Translation & 0.43 \\ \hline
+Sentiment detection & 0.40 \\ \hline
+Dialogues, generation, interfaces & 0.36 \\ \hline
+Software & 0.33 \\ \hline
+Grammar & 0.31 \\ \hline
+Deep learning & 0.31 \\ \hline
+Information retrieval & 0.30 \\ \hline
+Semantics & 0.30 \\ \hline
+Languages & 0.26 \\ \hline
+Knowledge & 0.21 \\ \hline
+\bottomrule
+\end{tabular}
+\end{table}

File diff suppressed because it is too large
+ 5967 - 0
output/acl_2002_2022/ngrams.csv


+ 1 - 0
output/acl_2002_2022/nu_expertise.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/m2/K5/MD5E-s1696--865702a6c8bd35eee9503561d731a855.npy/MD5E-s1696--865702a6c8bd35eee9503561d731a855.npy

+ 1 - 0
output/acl_2002_2022/nu_expertise_symmetric.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/8x/vK/MD5E-s1696--6e297e5e7b5f8a94d2a155201e98508d.npy/MD5E-s1696--6e297e5e7b5f8a94d2a155201e98508d.npy

+ 1 - 0
output/acl_2002_2022/nu_ling.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/kv/Pj/MD5E-s1696--1c82dd44bc039d9b6e5d62ed9afc9582.npy/MD5E-s1696--1c82dd44bc039d9b6e5d62ed9afc9582.npy

+ 1 - 0
output/acl_2002_2022/nu_ling_symmetric.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/8V/fp/MD5E-s1696--9fb43db8186a9957c1fbbc42176a05a6.npy/MD5E-s1696--9fb43db8186a9957c1fbbc42176a05a6.npy

+ 19 - 0
output/acl_2002_2022/params.yml

@@ -0,0 +1,19 @@
+!!python/object:argparse.Namespace
+add_title: true
+blacklist: null
+constant_sampling: 12000
+dataset: ../acl
+dimensions: 50
+filter: no-filter
+lemmatize: true
+limit_redundancy: false
+location: output/acl_2002_2022
+max_df: 0.15
+min_df: 0.00075
+pre_trained_embeddings: true
+remove_latex: true
+samples: 300000
+threads: 30
+topics: 20
+use_saved_embeddings: true
+values: []
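`params.yml` is a PyYAML dump of the `argparse.Namespace` used for this run, tagged `!!python/object:argparse.Namespace`, so `yaml.safe_load` will refuse to parse it. A minimal sketch of reading it back, assuming PyYAML is available; `unsafe_load` (or a custom constructor) is needed because of the Python-object tag and should only be applied to trusted files such as this one.

```python
# Minimal sketch: read params.yml back into an argparse.Namespace.
# The file is tagged !!python/object:argparse.Namespace, which safe_load rejects,
# so this uses yaml.unsafe_load -- only do this for files you trust.
import yaml  # PyYAML

with open("output/acl_2002_2022/params.yml") as f:
    params = yaml.unsafe_load(f)

print(params.topics)                 # 20
print(params.dimensions)             # 50
print(params.min_df, params.max_df)  # 0.00075 0.15
```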

+ 1 - 0
output/acl_2002_2022/pooled_resources.parquet

@@ -0,0 +1 @@
+../../.git/annex/objects/Kf/VW/MD5E-s86719--b67d8df336c28cf6fb3776645946691e/MD5E-s86719--b67d8df336c28cf6fb3776645946691e

+ 1 - 0
output/acl_2002_2022/sankey_control_nu.pdf

@@ -0,0 +1 @@
+/annex/objects/MD5E-s21783--68f32a04669b598c7250700151452194.pdf

+ 1 - 0
output/acl_2002_2022/sankey_control_nu_acl.pdf

@@ -0,0 +1 @@
+../../.git/annex/objects/59/mK/MD5E-s21783--68f32a04669b598c7250700151452194.pdf/MD5E-s21783--68f32a04669b598c7250700151452194.pdf

+ 1 - 0
output/acl_2002_2022/scores.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/zX/p2/MD5E-s368--ea3a48ae4ddaa1b5e6a4fe45d68ea5ec.npy/MD5E-s368--ea3a48ae4ddaa1b5e6a4fe45d68ea5ec.npy

File diff suppressed because it is too large
+ 11110 - 0
output/acl_2002_2022/topic_citation_matrix.eps


+ 21 - 0
output/acl_2002_2022/topics.csv

@@ -0,0 +1,21 @@
+,top_words,label
+0,"previous,experimental,experimental_result,novel,state_of_the_art,significant,previous_work,baseline,show,empirical_result,future,conventional,state_of_theart,demonstrate,state_ofthe_art,outperforms,strong,recent,recent_work,improved","Junk"
+1,"network,neural,representation,convolutional,sequence,architecture,neural_network,input,mechanism,graph,rnns,encoders,lstm,memory,layer,recurrent,module,abstraction,encoding,attention","Deep learning"
+2,"dependency,grammar,parser,parsing,tree,treebank,tag,morphological,deterministic,treebanks,rule,derivation,formalism,analyzer,morphology,constraint,parse,universal,turkish,syntactic_dependency","Syntactic Parsing \& Grammar"
+3,"dans,le,nous,un,une,la,article,du,par,de,pour,ca,sur,en,particulier,non,ce,et,que,premier","Junk"
+4,"entity,relation,domain,extraction,event,type,terminology,temporal,concept,term,biomedical,mention,attribute,literature,expression,extract,record,temporal_expression,name,trigger","Junk"
+5,"natural,dialog,natural_language,dialogue,utterance,understanding,user,spoken,interaction,conversation,video,natural_language_generation,multimodal,ubiquitous,intention,response,interface,generation,instruction,simulation","Dialogues \& interfaces"
+6,"set,score,accuracy,test,classifier,f_score,reasonable,best,average,baseline,combination,recall,rank,second,best_result,run,precision,submission,ensemble,rule","Junk"
+7,"question,knowledge,dataset,fact,answer,evidence,datasets,commonsense,reasoning,answering,qa,base,benchmark,challenge,testbed,comprehension,passage,gap,multi_hop,large_scale","Question Answering \& Knowledge Bases"
+8,"embeddings,datasets,pre_trained,training,augmentation,fine_tune,fine_tuned,cross_lingual,http,pretrained,unavailable,sample,dataset,al.,example,transferability,setting,benchmark,counterpart,fine_tuning","Embeddings \& Pre-trained Models"
+9,"english,lexical,verb,entry,lexicon,construction,japanese,english_word,french,thesaurus,meaning,german,compound,spanish,us,expression,synonym,dictionary,noun,collocation","Lexical Resources \& Multilingual Entries"
+10,"sentiment,classification,detection,tweet,comment,twitter,emotion,sentiment_analysis,social_medium,stance,post,medium,social,sentiment_classification,opinion,affect,facebook,mining,feed,identifying","Sentiment Analysis"
+11,"linguistic,scholar,computational,literary,background,body,scientist,philosophy,science,cognitive,linguist,expertise,computational_analysis,linguistic_analysis,extent,linguistics,applied,academic,scientific,technological","Junk"
+12,"document,context,space,similarity,vector,topic,disambiguation,cluster,technique,query,clustering,sense,relatedness,co_occurrence,retrieval,semantic_similarity,search,snippet,window,indexing","Document Similarity \& Semantic Analysis"
+13,"automatic,human,metric,summary,summarization,generation,news,story,quality,headline,human_evaluation,assessment,fluency,content,diversity,coherent,automated,ranking,automatic_summarization,informativeness","Information retrieval"
+14,"annotation,tool,processing,nlp,resource,standard,effort,application,research,toolkit,challenge,arabic,area,project,natural_language_processing,community,researcher,development,api,guideline","NLP Tools \& Resources"
+15,"error,speech,recognition,chinese,character,asr,segmentation,correction,speaker,study,rate,pronunciation,mandarin,native,accent,vietnamese,disfluency,learner,track,transcription","Speech"
+16,"semantic,role,discourse,predicate,argument,relation,semantics,semantic_structure,implicit,syntactic,resolution,co_reference,structure,parsing,structural,anaphora,semantic_role,semantic_task,textual,semantic_parsing","Semantics \& Discourse"
+17,"several,first,time,many,various,multiple,single,number,possible,numerous,first_time,step,range,potential,larger,ten,essential,optimized,crucial,impractical","Junk"
+18,"smt,quality,statistical,estimation,statistical_machine,bilingual,statistical_machine_translation,nmt,syntax_based,bitexts,alignment,pair,source,mt,neural_machine,neural_machine_translation,bleu,parallel,pbsmt,automatic_translation","Statistical \& Neural Machine Translation"
+19,"learning,framework,algorithm,optimization,field,problem,random,decision,function,conditional,estimator,bayesian,sequential,joint,hidden,markov,semi_supervised,active,perceptron,policy","Learning Algorithms \& Optimization Techniques"

+ 1 - 0
output/acl_2002_2022/topics_counts.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/2k/J0/MD5E-s9326368--421a92ecdde721ec04122003728a4d4f.npy/MD5E-s9326368--421a92ecdde721ec04122003728a4d4f.npy

+ 1 - 0
output/acl_2002_2022/topics_order.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/gx/Wv/MD5E-s240--39d8655e7a5cc9a96bf769298882516d.npy/MD5E-s240--39d8655e7a5cc9a96bf769298882516d.npy

File diff suppressed because it is too large
+ 2196 - 0
output/authors_brokerage.csv


File diff suppressed because it is too large
+ 1931 - 0
output/authors_centrality.csv


+ 1 - 0
output/etm_20_pretrained/.DS_Store

@@ -0,0 +1 @@
+../../.git/annex/objects/kf/M8/MD5E-s6148--194577a7e20bdcc7afbb718f502c134c/MD5E-s6148--194577a7e20bdcc7afbb718f502c134c

+ 1 - 0
output/etm_20_pretrained/age_vs_position.png

@@ -0,0 +1 @@
+../../.git/annex/objects/9x/zv/MD5E-s41783--b19649ca03421c8a76ed60db0516f036.png/MD5E-s41783--b19649ca03421c8a76ed60db0516f036.png

File diff suppressed because it is too large
+ 2196 - 0
output/etm_20_pretrained/aggregate.csv


File diff suppressed because it is too large
+ 186163 - 0
output/etm_20_pretrained/articles.csv


+ 1 - 0
output/etm_20_pretrained/authors_full_records.pickle

@@ -0,0 +1 @@
+../../.git/annex/objects/GV/Vp/MD5E-s655733001--1b1ccf65ca24580f829ae5e57f2c8150/MD5E-s655733001--1b1ccf65ca24580f829ae5e57f2c8150

+ 1 - 0
output/etm_20_pretrained/authors_keywords_record.npz

@@ -0,0 +1 @@
+../../.git/annex/objects/xQ/1w/MD5E-s2030672--af9380409f0b7e3369331c23343f5a44.npz/MD5E-s2030672--af9380409f0b7e3369331c23343f5a44.npz

File diff suppressed because it is too large
+ 23547 - 0
output/etm_20_pretrained/brokerage.csv


+ 6 - 0
output/etm_20_pretrained/capital_measures.csv

@@ -0,0 +1,6 @@
+,Unnamed: 0_x,start_1,end_1,expertise_1,start_2,end_2,expertise_2,start_3,end_3,expertise_3,start_4,end_4,expertise_4,start_5,end_5,expertise_5,start_6,end_6,expertise_6,start_7,end_7,expertise_7,start_8,end_8,expertise_8,start_9,end_9,expertise_9,start_10,end_10,expertise_10,start_11,end_11,expertise_11,start_12,end_12,expertise_12,start_13,end_13,expertise_13,start_14,end_14,expertise_14,start_15,end_15,expertise_15,start_16,end_16,expertise_16,start_17,end_17,expertise_17,start_18,end_18,expertise_18,start_19,end_19,expertise_19,start_20,end_20,expertise_20,pooled_resources,research_diversity,social_diversity,intellectual_diversity,social_magnitude,intellectual_stirling,social_stirling,excess_social_diversity,excess_social_stirling,Unnamed: 0_y,brokerage,degree
+mean,1096.8605287146763,62.890610756608936,45.73655423883318,0.07328036555769059,92.29443938012763,35.25159525979945,0.11770560791240284,44.60574293527803,25.21877848678213,0.05968199666033241,0.0,0.0,0.0,26.619416590701913,21.820875113947128,0.035205018322006724,87.72743846855059,59.02917046490428,0.0996793834883834,37.74567000911577,26.12260711030082,0.050015807660719176,0.0,0.0,0.0,79.14448495897904,37.341841385597085,0.08140163571660784,38.16453965360073,34.2538742023701,0.05028589878061855,50.05059252506837,30.307657247037376,0.06221887208283163,0.0,0.0,0.0,0.0,0.0,0.0,33.97948951686418,52.33637192342753,0.03853371119383729,32.275296262534184,23.580674567000912,0.04326812731059411,38.90929808568824,27.57383773928897,0.05011398007260723,104.15041020966272,70.57110300820419,0.10062191114556264,48.71103008204193,34.81084776663628,0.06549772650763844,22.470373746581586,17.240656335460347,0.03673399410033161,25.7616226071103,26.962169553327257,0.03575596348783511,"[0.65109277 1.21143537 0.52102686 0.         0.32169839 0.88587754
+ 0.46152954 0.         0.80231566 0.44165957 0.61516255 0.
+ 0.         0.36094724 0.40677678 0.47347229 1.08248172 0.53431475
+ 0.34403412 0.33269506]",6.170389436645058,8.501217915401353,6.092267370810347,9.446520219981013,0.4730547902969735,0.5508875865939536,0.004368396762728921,0.0002557203383153612,5063.0916134913405,44.80506006366023,10.82377883459128
+std,633.8970340356047,136.19834411816285,113.43601688303553,0.12866953840678674,182.75871453707256,83.45828603661717,0.18928416932501696,79.84806013637288,45.71162501315768,0.09684092204466932,0.0,0.0,0.0,27.900687114735003,27.47390756449949,0.033580014194481665,209.51603735271044,140.50801927527698,0.19833739970770667,40.2681395902263,31.741963078922243,0.04597035361247265,0.0,0.0,0.0,221.66806914065444,101.46645147397231,0.16947450071300124,100.60653631220639,95.07737409250996,0.10783122884388115,55.473229985054836,33.26634855074712,0.05154927606365638,0.0,0.0,0.0,0.0,0.0,0.0,94.74121538138365,128.7281992177133,0.09470060515269516,37.557906172091876,28.523350012619158,0.04500419694403698,98.15314385707796,77.0135944149388,0.10404546283517781,217.52660352169727,133.4096294612293,0.1448320153706696,97.73396237860004,75.83907441622516,0.11154605758400196,48.470771342660306,34.4309014369307,0.08024927582408375,47.7888096492231,57.59183019875305,0.06251771553797464,,2.1678528711447713,2.215671427943636,2.1710788790924274,5.743170151753923,0.09867248851083135,0.07163586080936535,1.4531890586745433,0.05193824206030997,4986.433904385217,66.1629678931609,6.276507540988735
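`capital_measures.csv` holds summary rows (mean and standard deviation) over the per-author capital measures. The `pooled_resources` field of the mean row is a numpy array printed as a bracketed string; it wraps over several physical lines but remains a single quoted CSV field. A minimal sketch of reading the file and recovering that vector, assuming the quoting is preserved as shown.

```python
# Minimal sketch: read capital_measures.csv and recover the pooled_resources
# vector, stored as a bracketed, whitespace-separated string that wraps over
# several lines (pandas keeps it as one field because it is quoted).
import numpy as np
import pandas as pd

cm = pd.read_csv("output/etm_20_pretrained/capital_measures.csv", index_col=0)

raw = cm.loc["mean", "pooled_resources"]            # "[0.651... 0.332...]"
pooled = np.array(raw.strip("[]").split(), dtype=float)
print(pooled.shape)                                  # expected: one value per topic
```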

File diff suppressed because it is too large
+ 9518 - 0
output/etm_20_pretrained/capital_measures.eps


File diff suppressed because it is too large
+ 9431 - 0
output/etm_20_pretrained/change_score.eps


File diff suppressed because it is too large
+ 10043 - 0
output/etm_20_pretrained/change_score_effects_entropy_brokerage.eps


File diff suppressed because it is too large
+ 10048 - 0
output/etm_20_pretrained/change_score_effects_entropy_magnitude.eps


File diff suppressed because it is too large
+ 10599 - 0
output/etm_20_pretrained/cost_delta.eps


File diff suppressed because it is too large
+ 10597 - 0
output/etm_20_pretrained/cost_delta_identity.eps


File diff suppressed because it is too large
+ 10549 - 0
output/etm_20_pretrained/cost_delta_knowledge.eps


File diff suppressed because it is too large
+ 10998 - 0
output/etm_20_pretrained/cost_matrix_counterfactual_couplings_knowledge_bounded.eps


File diff suppressed because it is too large
+ 10588 - 0
output/etm_20_pretrained/cost_matrix_knowledge_bounded.eps


File diff suppressed because it is too large
+ 10902 - 0
output/etm_20_pretrained/cost_matrix_predicted_couplings_knowledge_bounded.eps


File diff suppressed because it is too large
+ 11002 - 0
output/etm_20_pretrained/cost_matrix_true_couplings_knowledge_bounded.eps


File diff suppressed because it is too large
+ 14773 - 0
output/etm_20_pretrained/cost_vs_nu.eps


File diff suppressed because it is too large
+ 14789 - 0
output/etm_20_pretrained/cost_vs_nu_identity.eps


File diff suppressed because it is too large
+ 14775 - 0
output/etm_20_pretrained/cost_vs_nu_knowledge.eps


+ 1 - 0
output/etm_20_pretrained/dataset.pickle

@@ -0,0 +1 @@
+../../.git/annex/objects/8M/0g/MD5E-s125920179--bca15cd429ae9491d421666d0fc09a17/MD5E-s125920179--bca15cd429ae9491d421666d0fc09a17

File diff suppressed because it is too large
+ 9184 - 0
output/etm_20_pretrained/disruption_score.eps


File diff suppressed because it is too large
+ 9837 - 0
output/etm_20_pretrained/disruption_score_effects_entropy_magnitude.eps


File diff suppressed because it is too large
+ 10611 - 0
output/etm_20_pretrained/ei_R_control_nu.eps


File diff suppressed because it is too large
+ 10616 - 0
output/etm_20_pretrained/ei_counts_control_nu.eps


File diff suppressed because it is too large
+ 11065 - 0
output/etm_20_pretrained/ei_delta_control_nu.eps


File diff suppressed because it is too large
+ 10766 - 0
output/etm_20_pretrained/ei_gamma_control_nu.eps


File diff suppressed because it is too large
+ 10413 - 0
output/etm_20_pretrained/ei_mu_control_nu.eps


+ 1 - 0
output/etm_20_pretrained/ei_samples_control_nu.npz

@@ -0,0 +1 @@
+../../.git/annex/objects/Vv/vQ/MD5E-s6526947812--36dddef366e30f1e97c60888b6436446.npz/MD5E-s6526947812--36dddef366e30f1e97c60888b6436446.npz

+ 1 - 0
output/etm_20_pretrained/ei_samples_control_nu_crossval.npz

@@ -0,0 +1 @@
+../../.git/annex/objects/61/0F/MD5E-s4145673789--5b4f3c827320ddfc40db4da2d5ae5f35.npz/MD5E-s4145673789--5b4f3c827320ddfc40db4da2d5ae5f35.npz

+ 1 - 0
output/etm_20_pretrained/embeddings.bin

@@ -0,0 +1 @@
+../../.git/annex/objects/97/4V/MD5E-s3381859--2346dc4fcdbdb895df85c1511542c6f6.bin/MD5E-s3381859--2346dc4fcdbdb895df85c1511542c6f6.bin

+ 1 - 0
output/etm_20_pretrained/embeddings.mdl

@@ -0,0 +1 @@
+../../.git/annex/objects/K5/5z/MD5E-s1528639--37b60ab62d269334888d4b1e96666279.mdl/MD5E-s1528639--37b60ab62d269334888d4b1e96666279.mdl

File diff suppressed because it is too large
+ 10105 - 0
output/etm_20_pretrained/entered_score_effects_entropy.eps


File diff suppressed because it is too large
+ 1381 - 0
output/etm_20_pretrained/entropy.eps


+ 1 - 0
output/etm_20_pretrained/etm_instance.pickle

@@ -0,0 +1 @@
+../../.git/annex/objects/9Q/1F/MD5E-s180102844--ba2b0edda6892bd2f9c85f2022603f33/MD5E-s180102844--ba2b0edda6892bd2f9c85f2022603f33

File diff suppressed because it is too large
+ 9109 - 0
output/etm_20_pretrained/exists.eps


File diff suppressed because it is too large
+ 10171 - 0
output/etm_20_pretrained/exited_score_effects_entropy.eps


File diff suppressed because it is too large
+ 10211 - 0
output/etm_20_pretrained/exited_score_effects_entropy_brokerage.eps


File diff suppressed because it is too large
+ 10513 - 0
output/etm_20_pretrained/exited_score_effects_entropy_magnitude.eps


+ 1 - 0
output/etm_20_pretrained/keywords.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/F1/w5/MD5E-s16723648--c2d89b19ffab8ee82f92571148f4532d.npy/MD5E-s16723648--c2d89b19ffab8ee82f92571148f4532d.npy

+ 1 - 0
output/etm_20_pretrained/keywords_topics.npy

@@ -0,0 +1 @@
+../../.git/annex/objects/55/FP/MD5E-s4468016--41b185c36efc5dd5722039c7b150bc52.npy/MD5E-s4468016--41b185c36efc5dd5722039c7b150bc52.npy

+ 1 - 0
output/etm_20_pretrained/knowledge_transfers_weights.npz

@@ -0,0 +1 @@
+../../.git/annex/objects/6X/85/MD5E-s170535--1dce439699eb3cea9f013fbd96531d9a.npz/MD5E-s170535--1dce439699eb3cea9f013fbd96531d9a.npz

+ 31 - 0
output/etm_20_pretrained/low_change.tex

@@ -0,0 +1,31 @@
+\begin{table}[H]
+\centering
+\caption{Physicists with the lowest change scores $c_a$. $D(\bm{I_a})$ and $D(\bm{S_a})$ measure the diversity of intellectual and social capital. Numbers in parentheses indicate the share of attention dedicated to each research area. Asterisks ($\ast$) indicate physicists with a permanent position.}
+\label{table:low_change}
+\begin{tabular}{p{0.15\textwidth}|c|c|c|b{0.25\textwidth}|b{0.25\textwidth}}
+\toprule
+              Physicist & $c_a$ & $D(\bm{I_a})$ & $D(\bm{S_a})$ &                   Previous main area &                       Current main area \\
+\midrule
+     J.~Huston ($\ast$) &  0.04 &          2.96 &          5.50 &              Collider physics (0.61) &              Collider physics (0.60)\\ \hline
+  U.~D.~Alesio ($\ast$) &  0.05 &          1.68 &          4.50 &              Collider physics (0.87) &              Collider physics (0.88)\\ \hline
+            S.~Schumann &  0.06 &          4.15 &          7.90 &              Collider physics (0.46) &              Collider physics (0.45)\\ \hline
+          A.~V.~Lipatov &  0.06 &          2.88 &          4.28 &              Collider physics (0.68) &              Collider physics (0.68)\\ \hline
+           Andreas Metz &  0.06 &          2.01 &          3.61 &              Collider physics (0.85) &              Collider physics (0.89)\\ \hline
+               I.~Vitev &  0.07 &          2.95 &          5.49 &              Collider physics (0.72) &              Collider physics (0.73)\\ \hline
+ Xin Nian Wang ($\ast$) &  0.07 &          2.61 &          4.06 &              Collider physics (0.77) &              Collider physics (0.75)\\ \hline
+             S.~Goswami &  0.07 &          2.15 &          5.22 &  Neutrinos \& flavour physics (0.83) &  Neutrinos \& flavour physics (0.79)\\ \hline
+         Y.~Mehtar Tani &  0.07 &          2.75 &          5.17 &              Collider physics (0.75) &              Collider physics (0.75)\\ \hline
+           M.~Trigiante &  0.07 &          5.11 &          6.42 & String theory \& supergravity (0.55) & String theory \& supergravity (0.56)\\ \hline
+     F.~Murgia ($\ast$) &  0.07 &          1.84 &          4.57 &              Collider physics (0.86) &              Collider physics (0.87)\\ \hline
+     B.~Kayser ($\ast$) &  0.07 &          2.07 &          4.94 &  Neutrinos \& flavour physics (0.76) &  Neutrinos \& flavour physics (0.77)\\ \hline
+   J.~F.~Owens ($\ast$) &  0.07 &          2.01 &          5.42 &              Collider physics (0.78) &              Collider physics (0.80)\\ \hline
+           A.~Bacchetta &  0.07 &          2.07 &          3.19 &              Collider physics (0.84) &              Collider physics (0.81)\\ \hline
+P.~M.~Nadolsky ($\ast$) &  0.07 &          3.19 &          5.28 &              Collider physics (0.62) &              Collider physics (0.59)\\ \hline
+            D.~Martelli &  0.07 &          4.42 &          5.97 & String theory \& supergravity (0.57) & String theory \& supergravity (0.58)\\ \hline
+              C.~Pisano &  0.08 &          1.77 &          4.53 &              Collider physics (0.87) &              Collider physics (0.88)\\ \hline
+   M.~Strikman ($\ast$) &  0.08 &          2.77 &          4.99 &              Collider physics (0.77) &              Collider physics (0.75)\\ \hline
+    J.~Nemchik ($\ast$) &  0.08 &          2.67 &          4.69 &              Collider physics (0.80) &              Collider physics (0.78)\\ \hline
+Zhi Zhong Xing ($\ast$) &  0.08 &          3.44 &          8.17 &  Neutrinos \& flavour physics (0.71) &  Neutrinos \& flavour physics (0.65)\\ \hline
+\bottomrule
+\end{tabular}
+\end{table}

+ 31 - 0
output/etm_20_pretrained/low_disruption.tex

@@ -0,0 +1,31 @@
+\begin{table}[H]
+\centering
+\caption{Physicists with the lowest disruption scores. $D(\bm{I_a})$ and $D(\bm{S_a})$ measure the diversity of intellectual and social capital. Numbers in parentheses indicate the share of attention dedicated to each research area. Asterisks ($\ast$) indicate physicists with a permanent position.}
+\label{table:low_disruption}
+\begin{tabular}{p{0.15\textwidth}|c|c|c|b{0.25\textwidth}|b{0.25\textwidth}}
+\toprule
+              Physicist & $c_a$ & $D(\bm{I_a})$ & $D(\bm{S_a})$ &                   Previous main area &                       Current main area \\
+\midrule
+     J.~Huston ($\ast$) &  0.03 &          2.96 &          5.50 &              Collider physics (0.61) &              Collider physics (0.60)\\ \hline
+  U.~D.~Alesio ($\ast$) &  0.03 &          1.68 &          4.50 &              Collider physics (0.87) &              Collider physics (0.88)\\ \hline
+          A.~V.~Lipatov &  0.03 &          2.88 &          4.28 &              Collider physics (0.68) &              Collider physics (0.68)\\ \hline
+             S.~Goswami &  0.03 &          2.15 &          5.22 &  Neutrinos \& flavour physics (0.83) &  Neutrinos \& flavour physics (0.79)\\ \hline
+            S.~Schumann &  0.04 &          4.15 &          7.90 &              Collider physics (0.46) &              Collider physics (0.45)\\ \hline
+           M.~Trigiante &  0.04 &          5.11 &          6.42 & String theory \& supergravity (0.55) & String theory \& supergravity (0.56)\\ \hline
+               I.~Vitev &  0.04 &          2.95 &          5.49 &              Collider physics (0.72) &              Collider physics (0.73)\\ \hline
+           Andreas Metz &  0.04 &          2.01 &          3.61 &              Collider physics (0.85) &              Collider physics (0.89)\\ \hline
+           A.~Bacchetta &  0.04 &          2.07 &          3.19 &              Collider physics (0.84) &              Collider physics (0.81)\\ \hline
+            D.~Martelli &  0.04 &          4.42 &          5.97 & String theory \& supergravity (0.57) & String theory \& supergravity (0.58)\\ \hline
+ Xin Nian Wang ($\ast$) &  0.04 &          2.61 &          4.06 &              Collider physics (0.77) &              Collider physics (0.75)\\ \hline
+     F.~Murgia ($\ast$) &  0.04 &          1.84 &          4.57 &              Collider physics (0.86) &              Collider physics (0.87)\\ \hline
+              C.~Pisano &  0.05 &          1.77 &          4.53 &              Collider physics (0.87) &              Collider physics (0.88)\\ \hline
+         Y.~Mehtar Tani &  0.05 &          2.75 &          5.17 &              Collider physics (0.75) &              Collider physics (0.75)\\ \hline
+           E.~R.~Sharpe &  0.05 &          2.96 &          4.05 & String theory \& supergravity (0.75) & String theory \& supergravity (0.74)\\ \hline
+   J.~F.~Owens ($\ast$) &  0.05 &          2.01 &          5.42 &              Collider physics (0.78) &              Collider physics (0.80)\\ \hline
+    Qiang Zhao ($\ast$) &  0.05 &          3.84 &          5.50 &                       Hadrons (0.65) &                       Hadrons (0.69)\\ \hline
+   M.~Strikman ($\ast$) &  0.05 &          2.77 &          4.99 &              Collider physics (0.77) &              Collider physics (0.75)\\ \hline
+P.~M.~Nadolsky ($\ast$) &  0.05 &          3.19 &          5.28 &              Collider physics (0.62) &              Collider physics (0.59)\\ \hline
+    J.~Nemchik ($\ast$) &  0.05 &          2.67 &          4.69 &              Collider physics (0.80) &              Collider physics (0.78)\\ \hline
+\bottomrule
+\end{tabular}
+\end{table}

+ 0 - 0
output/etm_20_pretrained/low_entered.tex


Some files were not shown because too many files changed in this diff