#!/usr/local/bin/perl
use Inline C;
use Mojolicious::Lite;
use Mojo::JSON qw(decode_json encode_json to_json);
use Encode qw(decode encode);
use Mojo::Server::Daemon;
# -cbow 1 -size 200 -window 8 -negative 25 -hs 0 -sample 1e-4 -threads 40 -binary 1 -iter 15
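# Load the word2vec binary model once at start-up; the Inline::C functions
# below (init_net/get_neighbours) operate on the in-memory copy.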
init_net("vectors14.bin");
get '/' => sub {
my $c = shift;
my $word=$c->param('word');
my $no_nbs=$c->param('n') || 100;
my @lists;
if(defined($word) && $word !~ /^\s*$/) {
$c->inactivity_timeout(300);
for my $w (split('\s+', $word)) {
$c->app->log->debug('Looking for neighbours of '.$w);
push(@lists, get_neighbours(encode("iso-8859-1", $w), $no_nbs));
}
}
$c->render(template=>"index", word=>$word, no_nbs=>$no_nbs, lists=> \@lists);
};
app->start;
exit;
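# How this is typically run (example invocation, not part of the original setup):
#   perl word-vector-server.pl daemon -l 'http://*:3000'
# and then queried in a browser, e.g. http://localhost:3000/?word=Haus&n=50
# (script name, port and the query word are placeholders).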
__END__
__C__
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <stdlib.h> /* malloc, free */
#define max_size 2000
#define max_w 50
#define MAX_NEIGHBOURS 1000
char *bestw[MAX_NEIGHBOURS];
char file_name[max_size], st[100][max_size];
float dist, len, bestd[MAX_NEIGHBOURS], vec[max_size];
long long words, size, a, b, c, d, cn, bi[100], besti[MAX_NEIGHBOURS];
char ch;
float *M;
char *vocab;
char *stringBuffer;
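/*
 * init_net: load a model in the word2vec binary format (a header line
 * "<words> <size>" followed, for each word, by the token, a space and
 * <size> raw floats). Every vector is normalized to unit length here so
 * the dot products computed in get_neighbours() are cosine similarities.
 * Returns 0 on success, -1 if the file is missing or memory runs out.
 */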
int init_net(char *file_name) {
FILE *f;
stringBuffer = malloc(64000);
f = fopen(file_name, "rb");
if (f == NULL) {
printf("Input file %s not found\n", file_name);
return -1;
}
fscanf(f, "%lld", &words);
fscanf(f, "%lld", &size);
vocab = (char *)malloc((long long)words * max_w * sizeof(char));
if (vocab == NULL) {
printf("Cannot allocate memory for the vocabulary\n");
return -1;
}
for (a = 0; a < MAX_NEIGHBOURS; a++) bestw[a] = (char *)malloc(max_size * sizeof(char));
M = (float *)malloc((long long)words * (long long)size * sizeof(float));
if (M == NULL) {
printf("Cannot allocate memory: %lld MB %lld %lld\n", (long long)words * size * sizeof(float) / 1048576, words, size);
return -1;
}
for (b = 0; b < words; b++) {
a = 0;
while (1) {
vocab[b * max_w + a] = fgetc(f);
if (feof(f) || (vocab[b * max_w + a] == ' ')) break;
if ((a < max_w - 1) && (vocab[b * max_w + a] != '\n')) a++; // truncate over-long tokens instead of writing past this word's slot
}
vocab[b * max_w + a] = 0;
fread(&M[b * size], sizeof(float), size, f);
len = 0;
for (a = 0; a < size; a++) len += M[a + b * size] * M[a + b * size];
len = sqrt(len);
for (a = 0; a < size; a++) M[a + b * size] /= len;
}
fclose(f);
return 0;
}
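/*
 * get_neighbours: split the query string on single spaces, look every token
 * up in the vocabulary, sum and re-normalize the corresponding vectors, and
 * rank the whole vocabulary by cosine similarity against that query vector.
 * Returns a reference to an array of hashes of the form
 *   { word => ..., dist => cosine similarity, vector => [ ... ] },
 * or a reference to an empty array if a token is out of vocabulary.
 */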
SV *get_neighbours(char *st1, int N) {
if (N > MAX_NEIGHBOURS) N = MAX_NEIGHBOURS;
AV *array = newAV(); // created up front so the out-of-vocabulary path still returns a valid (empty) array
*stringBuffer = 0;
for (a = 0; a < N; a++) bestd[a] = 0;
for (a = 0; a < N; a++) bestw[a][0] = 0;
a = 0;
cn = 0;
b = 0;
c = 0;
while (1) {
st[cn][b] = st1[c];
b++;
c++;
st[cn][b] = 0;
if (st1[c] == 0) break;
if (st1[c] == ' ') {
cn++;
b = 0;
c++;
}
}
cn++;
for (a = 0; a < cn; a++) {
for (b = 0; b < words; b++) if (!strcmp(&vocab[b * max_w], st[a])) break;
if (b == words) b = -1;
bi[a] = b;
fprintf(stderr, "Word: \"%s\" Position in vocabulary: %lld\n", st[a], bi[a]);
if (b == -1) {
fprintf(stderr, "Out of dictionary word!\n");
break;
}
}
if (b == -1) goto end;
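// Build the query vector: sum the vectors of all recognized query words and normalize to unit length.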
for (a = 0; a < size; a++) vec[a] = 0;
for (b = 0; b < cn; b++) {
if (bi[b] == -1) continue;
for (a = 0; a < size; a++) vec[a] += M[a + bi[b] * size];
}
len = 0;
for (a = 0; a < size; a++) len += vec[a] * vec[a];
len = sqrt(len);
for (a = 0; a < size; a++) vec[a] /= len;
for (a = 0; a < N; a++) bestd[a] = -1;
for (a = 0; a < N; a++) bestw[a][0] = 0;
for (c = 0; c < words; c++) {
a = 0;
// do not skip the target word itself
// for (b = 0; b < cn; b++) if (bi[b] == c) a = 1;
// if (a == 1) continue;
dist = 0;
for (a = 0; a < size; a++) dist += vec[a] * M[a + c * size];
for (a = 0; a < N; a++) {
if (dist > bestd[a]) {
for (d = N - 1; d > a; d--) {
bestd[d] = bestd[d - 1];
besti[d] = besti[d - 1];
}
bestd[a] = dist;
besti[a] = c;
break;
}
}
}
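// Copy the top-N hits into Perl data structures: one hash per neighbour with
// its surface form, cosine similarity and raw vector (used client-side for t-SNE).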
for (a = 0; a < N; a++) {
strcpy(bestw[a], &vocab[besti[a] * max_w]);
HV* hash = newHV();
hv_store(hash, "word", strlen("word"), newSVpvf(bestw[a], 0), 0);
hv_store(hash, "dist", strlen("dist"), newSVnv(bestd[a]), 0);
AV *vector = newAV();
for (b = 0; b < size; b++) {
av_push(vector, newSVnv(M[b + besti[a] * size]));
}
hv_store(hash, "vector", strlen("vector"), newRV_noinc((SV*)vector), 0);
av_push(array, newRV_noinc((SV*)hash));
}
end:
return newRV_noinc((SV*)array);
}
__DATA__
@@ index.html.ep
<!DOCTYPE html>
<html>
<head>
<title>DeReKo-Word-Vector-Distances</title>
<script src="http://code.jquery.com/jquery-latest.min.js"></script>
<script src="//d3js.org/d3.v3.min.js" charset="utf-8"></script>
<script src="http://klinux10/word2vec/tsne.js"></script>
<style>
body {
font-family: Arial, sans-serif;
}
svg {
/* border: 1px solid #333; */
margin-right: 10px;
margin-bottom:10px;
}
#wrapper {
width: 100%;
/* border: 1px solid red; */
overflow: hidden; /* will contain if #first is longer than #second */
}
#first {
width: 300px;
margin-right: 20px;
float:left; /* add this */
/* border: 1px solid green; */
}
#second {
border: 1px solid #333;
height: 850px;
overflow: hidden; /* if you don't want #second to wrap below #first */
}
#cost {
z-index: 1;
position: fixed;
font-size: 10px;
color: #222222;
margin-bottom: 10px;
}
</style>
<script>
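// Client-side pipeline: showMap() receives {target, words, vecs} from the
// template, feeds the raw vectors to tsne.js and runs one optimization step
// per timer tick (up to 2000 iterations). drawEmbedding() creates one SVG
// text node per word; updateEmbedding() repositions them from the current
// t-SNE solution, with pan/zoom handled by d3.behavior.zoom.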
var opt = {epsilon: 1, perplexity: 20};
var T = new tsnejs.tSNE(opt); // create a tSNE instance
var Y;
var data;
function updateEmbedding() {
var Y = T.getSolution();
svg.selectAll('.u')
.data(data.words)
.attr("transform", function(d, i) { return "translate(" +
((Y[i][0]*20*ss + tx) + 400) + "," +
((Y[i][1]*20*ss + ty) + 400) + ")"; });
}
var svg;
function drawEmbedding() {
$("#embed").empty();
var div = d3.select("#embed");
svg = div.append("svg") // svg is global
.attr("width", 800)
.attr("height", 800);
var g = svg.selectAll(".b")
.data(data.words)
.enter().append("g")
.attr("class", "u");
g.append("a")
.attr("xlink:href", function(word) {return "/?word="+word;})
.append("text")
.attr("text-anchor", "top")
.attr("font-size", 12)
.attr("fill", function(d) {
if(data.target.indexOf(" "+d+" ") >= 0) {
return "red";
} else {
return "#333"
}
})
.text(function(d) { return d; });
var zoomListener = d3.behavior.zoom()
.scaleExtent([0.1, 10])
.center([0,0])
.on("zoom", zoomHandler);
zoomListener(svg);
}
var tx=0, ty=0;
var ss=1;
var iter_id=-1;
function zoomHandler() {
tx = d3.event.translate[0];
ty = d3.event.translate[1];
ss = d3.event.scale;
updateEmbedding();
}
var stepnum = 0;
function stopStep() {
clearInterval(iter_id);
}
function step() {
var i = T.iter;
if(i > 2000) {
stopStep();
} else {
var cost = Math.round(T.step() *1000) / 1000; // do a few steps
$("#cost").html("iteration " + i + ", cost: " + cost.toFixed(3));
updateEmbedding();
}
}
function showMap(j) {
data=j;
T.iter=0;
T.initDataRaw(data.vecs); // init embedding
drawEmbedding(); // draw initial embedding
if(iter_id >= 0) {
clearInterval(iter_id);
}
//T.debugGrad();
iter_id = setInterval(step, 1);
//step();
}
</script>
</head>
<body>
<form action="<%=url_for('/')->to_abs%>" method="GET">
word(s) (space-separated): <input type="text" name="word" value="<%= $word %>">
max. neighbours: <input type="text" name="n" value="<%= $no_nbs %>">
<input type="submit" value="Show">
</form>
<br>
% if(@$lists) {
<div id="wrapper">
<table id="first">
<tr>
<th align="right">Pos.</th><th align="left">Word</th><th align="right">Cosine dist.</th>
</tr>
% my $j=0; my @words; my @vecs; for my $list (@$lists) {
% my $i=1; for my $item (@$list) {
% if(!grep{$_ eq $item->{word}} @words) {
% push @vecs, $item->{vector};
% push @words, $item->{word};
% }
<tr>
<td align="right">
<%= $i++ %>.
</td>
<td>
<a href="/?word=<%= $item->{word} %>">
<%= $item->{word} %>
</a>
</td>
<td align="right">
<%= sprintf("%.3f", $item->{dist}) %>
</td>
</tr>
% }
% }
</table>
<script>
% use Mojo::ByteStream 'b';
$(window).load(function() {
showMap(<%= b(Mojo::JSON::to_json({target => " $word ", words => \@words, vecs => \@vecs})) %>);
});
</script>
% }
<div id="second" style="width:800px; height:800px; font-family: arial;">
<div id="embed">
</div>
<div id="cost"></div>
</div>
% if(@$lists) {
</div>
% }
<p>
Word vector model based on DeReKo-2015-II. Trained with <a href="https://code.google.com/p/word2vec/">word2vec</a> using the following parameters:
</p>
<pre>
-cbow 1 -size 300 -window 7 -negative 5 -hs 0 -sample 1e-5 -threads 44 -binary 1 -iter 5
</pre>
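<!-- For orientation only: with the stock word2vec tool the parameters above would
     correspond to an invocation roughly like
       ./word2vec -train dereko-2015-II.txt -output vectors.bin -cbow 1 -size 300 -window 7 -negative 5 -hs 0 -sample 1e-5 -threads 44 -binary 1 -iter 5
     where the corpus and output file names are placeholders, not the actual paths used. -->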
</body>
</html>